diff --git a/COPYRIGHT b/COPYRIGHT new file mode 100644 index 0000000000..3d9e89ee30 --- /dev/null +++ b/COPYRIGHT @@ -0,0 +1,5 @@ + +Copyright 2010-2012 Artefactual Systems Inc. + +Please note, Archivematica also includes several third-party libraries, each with their own copyright and license terms. See http://archivematica.org/software. + diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000000..2def0e8831 --- /dev/null +++ b/LICENSE @@ -0,0 +1,661 @@ + GNU AFFERO GENERAL PUBLIC LICENSE + Version 3, 19 November 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU Affero General Public License is a free, copyleft license for +software and other kinds of works, specifically designed to ensure +cooperation with the community in the case of network server software. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +our General Public Licenses are intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + Developers that use our General Public Licenses protect your rights +with two steps: (1) assert copyright on the software, and (2) offer +you this License which gives you legal permission to copy, distribute +and/or modify the software. 
+ + A secondary benefit of defending all users' freedom is that +improvements made in alternate versions of the program, if they +receive widespread use, become available for other developers to +incorporate. Many developers of free software are heartened and +encouraged by the resulting cooperation. However, in the case of +software used on network servers, this result may fail to come about. +The GNU General Public License permits making a modified version and +letting the public access it on a server without ever releasing its +source code to the public. + + The GNU Affero General Public License is designed specifically to +ensure that, in such cases, the modified source code becomes available +to the community. It requires the operator of a network server to +provide the source code of the modified version running there to the +users of that server. Therefore, public use of a modified version, on +a publicly accessible server, gives the public access to the source +code of the modified version. + + An older license, called the Affero General Public License and +published by Affero, was designed to accomplish similar goals. This is +a different license, not a version of the Affero GPL, but Affero has +released a new version of the Affero GPL which permits relicensing under +this license. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU Affero General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. 
The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. 
+ + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. 
The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. 
+ + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. 
+ + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. 
This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. 
For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. 
Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. 
+ + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. 
Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. 
+ + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. 
+ + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. 
You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Remote Network Interaction; Use with the GNU General Public License. 
+ + Notwithstanding any other provision of this License, if you modify the +Program, your modified version must prominently offer all users +interacting with it remotely through a computer network (if your version +supports such interaction) an opportunity to receive the Corresponding +Source of your version by providing access to the Corresponding Source +from a network server at no charge, through some standard or customary +means of facilitating copying of software. This Corresponding Source +shall include the Corresponding Source for any work covered by version 3 +of the GNU General Public License that is incorporated pursuant to the +following paragraph. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the work with which it is combined will remain governed by version +3 of the GNU General Public License. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU Affero General Public License from time to time. Such new versions +will be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU Affero General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU Affero General Public License, you may choose any version ever published +by the Free Software Foundation. 
+ + If the Program specifies that a proxy can decide which future +versions of the GNU Affero General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. 
+ + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU Affero General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU Affero General Public License for more details. + + You should have received a copy of the GNU Affero General Public License + along with this program. If not, see . + +Also add information on how to contact you by electronic and paper mail. + + If your software can interact with users remotely through a computer +network, you should also make sure that it provides a way for users to +get its source. For example, if your program is a web application, its +interface could display a "Source" link that leads users to an archive +of the code. 
There are many ways you could offer source, and different +solutions will be better for different programs; see section 13 for the +specific requirements. + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU AGPL, see +. \ No newline at end of file diff --git a/TRADEMARK b/TRADEMARK new file mode 100644 index 0000000000..cfb26e8936 --- /dev/null +++ b/TRADEMARK @@ -0,0 +1,6 @@ +Artefactual Systems Inc. owns all Archivematica trademarks, service marks, and graphic logos. + +Archivematica's LICENSE does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor except as required for reasonable and customary use in describing the origin of the Work. + +Guidelines for the use of Archivematica trademarks, service marks, and graphic logos are available at http://archivematica.org/trademark. + diff --git a/dev-helper b/dev-helper new file mode 100755 index 0000000000..3f0934eeb0 --- /dev/null +++ b/dev-helper @@ -0,0 +1,198 @@ +#!/bin/bash + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . 
+ +# @package Archivematica +# @author Joseph Perry +# @version svn: $Id$ + +cd `dirname $0` + +fn_exists() +{ + type $1 2>/dev/null | grep -q 'is a function' +} + +colour () { "$@" 2>&1>&3|sed 's,.*,\x1B[31m&\x1B[0m,'>&2;} 3>&1 + +function svn-update() { + part="svn up" + echo -n "\"Would you like to ${part}?\" (y/N) " + read a + if [[ $a == "Y" || $a == "y" ]]; then + echo "Doing ${part} ..." + echo "Cleaning up..." + cd localDevSetup/ + colour ./cleanup.sh + cd .. + SVNVERSION="`svnversion`" + echo "Updating from ${SVNVERSION}. Bringing latest changes to the working copy..." + svn update + + echo "Integrating into environment..." + cd localDevSetup/ + colour ./createLocalDevDirectories.sh + cd .. + else + echo "not going to ${part}" + fi +} + +function package-update() { + part="update/install package requirements" + echo -n "\"Would you like to ${part}?\" (y/N) " + read a + if [[ $a == "Y" || $a == "y" ]]; then + echo "Going to ${part} ..." + cd localDevSetup/ + colour sudo apt-get update + colour sudo ./installDependsFromDebianFile.py ./../src/archivematicaCommon/debian/control + #colour sudo ./installDependsFromDebianFile.py ./../src/createDublinCore/debian/control + colour sudo ./installDependsFromDebianFile.py ./../src/dashboard/debian/control + #./installDependsFromDebianFile.py ./../src/easy-extract/debian/control + colour sudo ./installDependsFromDebianFile.py ./../src/MCPClient/debian/control + colour sudo ./installDependsFromDebianFile.py ./../src/MCPrpcCLI/debian/control + colour sudo ./installDependsFromDebianFile.py ./../src/MCPServer/debian/control + colour #./installDependsFromDebianFile.py ./../src/metaPackage-forensic-tools/debian/control + colour #./installDependsFromDebianFile.py ./../src/metaPackage-shotgun/debian/control + colour sudo ./installDependsFromDebianFile.py ./../src/sanitizeNames/debian/control + colour sudo ./installDependsFromDebianFile.py ./../src/SIPCreationTools/debian/control + colour sudo ./installDependsFromDebianFile.py 
./../src/transcoder/debian/control + colour sudo ./installDependsFromDebianFile.py ./../src/upload-qubit/debian/control + colour #./installDependsFromDebianFile.py ./../src/vm-includes/debian/control + colour sudo ./installDependsFromDebianFile.py ./../src/xubuntuGuiScriptsEditor/debian/control + cd .. + else + echo "Not going to ${part}." + fi +} + +function recreate-db() { + part="recreate the databases" + echo -n "\"Would you like to ${part}?\" (y/N) " + read a + if [[ $a == "Y" || $a == "y" ]]; then + echo "Going to ${part} ..." + cd localDevSetup/ + sudo stop archivematica-mcp-server + sudo stop archivematica-mcp-client + sudo apache2ctl stop + colour ./recreateDB.sh + cd .. + sudo start archivematica-mcp-server + sudo start archivematica-mcp-client + sudo apachectl start + else + echo "Not going to ${part}." + fi +} + +function restart() { + part="restart archivematica services" + echo -n "\"Would you like to ${part}?\" (y/N) " + read a + if [[ $a == "Y" || $a == "y" ]]; then + echo "Going to ${part} ..." + sudo stop archivematica-mcp-server + sleep 1 + sudo stop archivematica-mcp-client + sleep 1 + + if [ -e "/tmp/archivematicaMCPServerPID" ] + then + sudo kill -9 `cat /tmp/archivematicaMCPServerPID` + fi + + sleep 3 + sudo rm /tmp/archivematicaMCP* + colour sudo start archivematica-mcp-server + colour sudo start archivematica-mcp-client + colour sudo apache2ctl restart + else + echo "Not going to ${part}." + fi +} + +function export-sampledata() { + part="export sample data to ~/sampledata" + echo -n "\"Would you like to ${part}?\" (y/N) " + read a + if [[ $a == "Y" || $a == "y" ]]; then + rev="`svn info | grep \"Revision: \" | awk -F " " '{print $2}'`" + echo Going to export to '~/'sampledata-$rev ... + colour svn export sampledata ~/sampledata-${rev} + else + echo "Not going to ${part}." 
+ fi +} + +function install-gui() { + part="re-create gui-scripts editor file" + echo -n "\"Would you like to ${part}?\" (y/N) " + read a + if [[ $a == "Y" || $a == "y" ]]; then + rm /home/$USER/.config/Thunar/uca.xml + tmp="`pwd`" + cd src/xubuntuGuiScriptsEditor/share + colour ./addArchivematicaGUIScripts.sh + cd "$tmp" + else + echo "Not going to ${part}." + fi +} + +function update-qubit() { + part="update Qubit (trunk) and restart its qubit-sword service" + echo -n "\"Would you like to ${part}?\" (y/N) " + read a + if [[ $a == "Y" || $a == "y" ]]; then + sudo chmod -R 777 qubit-svn + colour svn update qubit-svn + sudo chmod -R 777 qubit-svn + colour sudo stop qubit-sword + colour sudo start qubit-sword + echo "Remember to activate sfSwordPlugin and restart Qubit database if necessary!" + else + echo "Not going to ${part}." + fi +} + +function example() { + part="example" + echo -n "\"Would you like to ${part}?\" (y/N) " + read a + if [[ $a == "Y" || $a == "y" ]]; then + echo "Going to ${part} ..." + else + echo "Not going to ${part}" + fi +} + +# Run only one function if given +if [ ! -z ${1} ]; then + ${1} + exit +fi + +svn-update +package-update +recreate-db +restart +export-sampledata +install-gui +update-qubit diff --git a/dev-installer b/dev-installer new file mode 100755 index 0000000000..4293a6726d --- /dev/null +++ b/dev-installer @@ -0,0 +1,190 @@ +#!/bin/bash + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . + +# @package Archivematica +# @author Joseph Perry +# @version svn: $Id$ + +cd `dirname $0` + +part="create the archivematica user" +echo -n "\"Would you like to ${part}?\" (y/N) " +read a +if [[ $a == "Y" || $a == "y" ]]; then + echo "Going to ${part} ..." + sudo adduser --uid 333 --group --system --home /var/lib/archivematica/ archivematica + sudo gpasswd -a $USER archivematica +else + echo "Not going to ${part}" +fi + + +part="install Gearman" +echo -n "\"Would you like to ${part}?\" (y/N) " +read a +if [[ $a == "Y" || $a == "y" ]]; then + echo "Going to ${part} ..." + sudo apt-get install gearman + sudo apt-get install python-setuptools + sudo apt-get install python-gearman + #sudo easy_install gearman +else + echo "Not going to ${part}" +fi + +part="install/update MySQL" +echo -n "\"Would you like to ${part}?\" (y/N) " +read a +if [[ $a == "Y" || $a == "y" ]]; then + echo "Going to ${part} ..." + sudo apt-get install mysql-server -y +else + echo "Not going to ${part}" +fi + +part="install/update Apache" +echo -n "\"Would you like to ${part}?\" (y/N) " +read a +if [[ $a == "Y" || $a == "y" ]]; then + echo "Going to ${part} ..." + sudo apt-get install apache2 -y +else + echo "Not going to ${part}" +fi + +part="add ppa repositories" +echo -n "\"Would you like to ${part}?\" (y/N) " +read a +if [[ $a == "Y" || $a == "y" ]]; then + sudo apt-get install python-software-properties -y + sudo add-apt-repository ppa:archivematica/externals-dev + #sudo add-apt-repository ppa:twisted-dev/ppa + sudo apt-get update +else + echo "Not going to ${part}" +fi + +part="run the MCP Server preMCPLogging.sh" +echo -n "\"Would you like to ${part}?\" (y/N) " +read a +if [[ $a == "Y" || $a == "y" ]]; then + echo "Going to ${part} ..." 
+ tmp="`pwd`" + cd ./src/MCPServer/share/ + sudo ./preMCPLogging.sh + cd "$tmp" +else + echo "Not going to ${part}" +fi + +part="run the MCP Server postinst script" +echo -n "\"Would you like to ${part}?\" (y/N) " +read a +if [[ $a == "Y" || $a == "y" ]]; then + echo "Going to ${part} ..." + sudo ./src/MCPServer/debian/postinst +else + echo "Not going to ${part}" +fi + +part="install ICA-AtoM" +echo -n "\"Would you like to ${part}?\" (y/N) " +read a +if [[ $a == "Y" || $a == "y" ]]; then + + # { GIT } + # sudo apt-get --quiet --quiet --yes install git-core + # { SVN } + sudo apt-get --quiet --quiet --yes install subversion + + # Working directory + WDIR=$(pwd) + + # Remove + sudo rm -rf /var/www/ica-atom + sudo rm -rf ${WDIR}/qubit-git # Old name + sudo rm -rf ${WDIR}/ica-atom-svn # Old name + sudo rm -rf ${WDIR}/qubit-svn + + # { GIT } + # Clone git repository, create bran experimental from original/experimental + # git clone git://github.com/sevein/qubit.git ${WDIR}/qubit-git + # cd ${WDIR}/qubit-git + # git checkout --track origin/experimental + # cd ${WDIR} + # { SVN } + # Checkout SVN repo + svn checkout http://qubit-toolkit.googlecode.com/svn/trunk ${WDIR}/qubit-svn + + # Link + # /var/www is created by the Apache package + # so it needs to be installer at this point + sudo ln -sf ${WDIR}/qubit-svn /var/www/ica-atom + + # Permissions + sudo chown -R www-data:www-data ${WDIR}/qubit-svn + + # Restart database + echo "Enter mysql root password (hit enter if blank)" + mysql -u root --execute="DROP DATABASE IF EXISTS qubit; CREATE DATABASE qubit CHARACTER SET utf8 COLLATE utf8_unicode_ci;" + + # Workaround for issue 1188 + sudo rm -rf ${WDIR}/qubit-svn/cache/* + sudo -u www-data php ${WDIR}/qubit-svn/symfony cc + + # Restart Apache (not really needed) + sudo apache2ctl restart + +else + echo "Not going to ${part}." 
+fi + +part="reinstall archivematica upstart services - requires restart" +echo -n "\"Would you like to ${part}?\" (y/N) " +read a +if [[ $a == "Y" || $a == "y" ]]; then + echo "Going to ${part} ..." + sudo stop archivematica-mcp-client + sleep 1 + sudo stop archivematica-mcp-server + sleep 3 + sudo stop qubit-sword + sudo rm \ + /etc/init/archivematica-mcp-server.conf \ + /etc/init/archivematica-mcp-client.conf \ + /etc/init/qubit-sword.conf \ + /etc/init/openoffice-service.conf + sudo ln src/MCPServer/init/archivematica-mcp-server.conf /etc/init/ + sudo ln src/MCPClient/init/archivematica-mcp-client.conf /etc/init/ + #sudo ln src/MCPClient/init/openoffice-service.conf /etc/init/ + sudo ln qubit-svn/init/qubit-sword.conf /etc/init/ +else + echo "Not going to ${part}." +fi + +exit + +part="example" +echo -n "\"Would you like to ${part}?\" (y/N) " +read a +if [[ $a == "Y" || $a == "y" ]]; then + echo "Going to ${part} ..." +else + echo "Not going to ${part}" +fi diff --git a/externals/fits/archivematicaConfigs/fits.xml b/externals/fits/archivematicaConfigs/fits.xml new file mode 100755 index 0000000000..12cb64051e --- /dev/null +++ b/externals/fits/archivematicaConfigs/fits.xml @@ -0,0 +1,31 @@ + + + + + + + + + + + + + + + + + + true + true + true + xml/fits_output.xsd + http://hul.harvard.edu/ois/xml/xsd/fits/fits_output.xsd + http://hul.harvard.edu/ois/xml/ns/fits/fits_output + + + + DROID_SignatureFile_V35.xml + + diff --git a/localDevSetup/README.txt b/localDevSetup/README.txt new file mode 100644 index 0000000000..5dfdb5ed36 --- /dev/null +++ b/localDevSetup/README.txt @@ -0,0 +1,3 @@ + +Please Read +http://archivematica.org/wiki/index.php?title=Development_environment diff --git a/localDevSetup/cleanup.sh b/localDevSetup/cleanup.sh new file mode 100755 index 0000000000..d490b383c2 --- /dev/null +++ b/localDevSetup/cleanup.sh @@ -0,0 +1,42 @@ +#!/bin/bash + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. 
+# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . + +# @package Archivematica +# @author Joseph Perry +# @version svn: $Id$ + +if [ -e /usr/share/fits/xml/fits.xmlbackup ]; then + sudo rm /usr/share/fits/xml/fits.xml +fi + +sudo rm -r /usr/lib/archivematica +sudo rm -r /etc/archivematica +sudo rm -r /usr/share/archivematica + +sudo rm /usr/bin/upload-qubit +sudo rm /usr/bin/transcoder +sudo rm /usr/bin/archivematicaCreateMD5 +sudo rm /usr/bin/archivematicaRestructureForCompliance +sudo rm /usr/bin/sanitizeNames + +sudo rm -r /usr/lib/sanitizeNames + +sudo rm -r /var/archivematica/ + +sudo rm /etc/apache2/sites-enabled/000-default diff --git a/localDevSetup/createDatabases/postBuildRun.sh b/localDevSetup/createDatabases/postBuildRun.sh new file mode 100755 index 0000000000..4ae5fef1b7 --- /dev/null +++ b/localDevSetup/createDatabases/postBuildRun.sh @@ -0,0 +1,18 @@ +echo "The default password is demo" + +stty -echo +read -p "Enter mysql root password[hit Enter if blank]: " dpPassword; echo +stty echo + +if [ -n "$dpPassword" ] ; then + dpPassword="-p${dpPassword}" +fi + +cd postBuildRunAssistScripts +./preMCPLogging.sh "$dpPassword" +sudo mysqladmin create ica-atom $dpPassword +sudo mysqladmin create dcb $dpPassword +sudo mysqladmin create qubit $dpPassword +sudo mysqladmin create dashboard $dpPassword + +dpPassword="" diff --git 
a/localDevSetup/createDatabases/postBuildRunAssistScripts/preMCPLogging.sh b/localDevSetup/createDatabases/postBuildRunAssistScripts/preMCPLogging.sh new file mode 100755 index 0000000000..98abfb8f52 --- /dev/null +++ b/localDevSetup/createDatabases/postBuildRunAssistScripts/preMCPLogging.sh @@ -0,0 +1,13 @@ +databaseName="MCP" +username="demo" +password="demo" +dpPassword="$1" +sudo mysqladmin create "$databaseName" $dpPassword +#sudo mysql $databaseName +sudo mysql $dpPassword --execute="source ../../../src/MCPServer/share/mysql" "$databaseName" +sudo mysql $dpPassword --execute="CREATE USER '${username}'@'localhost' IDENTIFIED BY '${password}'" +sudo mysql $dpPassword --execute="GRANT SELECT, UPDATE, INSERT, DELETE ON ${databaseName}.* TO '${username}'@'localhost'" + + +#to delete the database and all of it's contents +# sudo mysqladmin drop MCP diff --git a/localDevSetup/createLocalDevDirectories.sh b/localDevSetup/createLocalDevDirectories.sh new file mode 100755 index 0000000000..d9f809d0ef --- /dev/null +++ b/localDevSetup/createLocalDevDirectories.sh @@ -0,0 +1,109 @@ +#!/bin/bash + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . 
+ +# @package Archivematica +# @author Joseph Perry +# @version svn: $Id$ + +set +e +origDir="`pwd`/" +cd ../ +svnDir="`pwd`/" + +lib="/usr/lib/archivematica" +sudo mkdir $lib +etc="/etc/archivematica" +sudo mkdir $etc +share="/usr/share/archivematica" +sudo mkdir $share + +sudo ln -s "${svnDir}src/MCPServer/etc" "${etc}/MCPServer" +sudo ln -s "${svnDir}src/MCPClient/etc" "${etc}/MCPClient" +sudo ln -s "${svnDir}src/archivematicaCommon/etc" "${etc}/archivematicaCommon" +sudo ln -s "${svnDir}src/SIPCreationTools/etc/" "${etc}/SIPCreationTools" +sudo ln -s "${svnDir}src/transcoder/etc" "${etc}/transcoder" + + +sudo ln -s "${svnDir}src/MCPServer/lib/" "${lib}/MCPServer" +sudo ln -s "${svnDir}src/MCPClient/lib/" "${lib}/MCPClient" +sudo ln -s "${svnDir}src/archivematicaCommon/lib/" "${lib}/archivematicaCommon" +sudo ln -s "${svnDir}src/SIPCreationTools/lib/" "${lib}/SIPCreationTools" +sudo ln -s "${svnDir}src/upload-qubit/lib/" "${lib}/upload-qubit" +sudo ln -s "${svnDir}src/transcoder/lib/" "${lib}/transcoder" +sudo ln -s "${svnDir}src/sanitizeNames/lib/" "/usr/lib/sanitizeNames" +sudo ln -s "${svnDir}src/dashboard/src/" "${share}/dashboard" +sudo ln "${svnDir}src/SIPCreationTools/bin/archivematicaCreateMD5" "/usr/bin/" +sudo ln "${svnDir}src/SIPCreationTools/bin/archivematicaRestructureForCompliance" "/usr/bin/" + +if [ ! -e /etc/init/archivematica-mcp-server.conf ] ; then + sudo ln "${svnDir}src/MCPServer/init/archivematica-mcp-server.conf" "/etc/init/" +fi +if [ ! -e /etc/init/archivematica-mcp-client.conf ] ; then + sudo ln "${svnDir}src/MCPClient/init/archivematica-mcp-client.conf" "/etc/init/" +fi +if [ -e /etc/init/openoffice-service.conf ] ; then + sudo stop openoffice-service + sudo rm "/etc/init/openoffice-service.conf" +fi +if [ ! 
-e /etc/init/qubit-sword.conf ] ; then + sudo ln "${svnDir}qubit-svn/init/qubit-sword.conf" "/etc/init/" +fi + +sudo ln "${svnDir}src/upload-qubit/upload-qubit" "/usr/bin/" +sudo ln "${svnDir}src/transcoder/bin/transcoder" "/usr/bin/" +sudo ln "${svnDir}src/sanitizeNames/bin/sanitizeNames" "/usr/bin/" + +sudo ln "${svnDir}src/vm-includes/share/apache.default" "/etc/apache2/sites-enabled/000-default" -f +sudo ln "${svnDir}src/vm-includes/share/apache.default" "/etc/apache2/sites-available/default" -f +sudo ln -sf "${svnDir}src/vm-includes/share/httpd.conf" "/etc/apache2/httpd.conf" + +sudo ln -sf "${svnDir}qubit-svn" /var/www/ica-atom +sudo chown -R www-data:www-data "${svnDir}qubit-svn" + +if [ ! -e /usr/share/fits/xml/fits.xmlbackup ]; then +sudo cp /usr/share/fits/xml/fits.xml /usr/share/fits/xml/fits.xmlbackup +fi +sudo ln -f "${svnDir}externals/fits/archivematicaConfigs/fits.xml" /usr/share/fits/xml/ +sudo chmod 775 /usr/share/fits/xml/fits.xml + +sudo mkdir /var/archivematica/ +sudo ln -s "${svnDir}src/MCPServer/sharedDirectoryStructure" "/var/archivematica/sharedDirectory" +sudo chown -R archivematica:archivematica "/var/archivematica/sharedDirectory" +sudo chmod -R g+s "/var/archivematica/sharedDirectory" + + +echo setting permission on share directories +sudo chmod -R 777 /var/archivematica/sharedDirectory/ +echo restarting apache +sudo apache2ctl restart + +#Configure sudoers for mcp and client +echo about to edit sudoers file +set -e +cd "$origDir" +tmp="./sudoers-`uuid`" +sudo cat /etc/sudoers > "./ETCsudoersBackup" +sudo grep -v archivematica "/etc/sudoers" > "${tmp}" +sudo echo "archivematica ALL=NOPASSWD:/bin/mv,/bin/chown,/bin/chmod,/usr/bin/unoconv,/usr/bin/gs,/usr/lib/transcoder/transcoderScripts/DocumentConverter.py,/usr/bin/inkscape,/usr/lib/archivematica/transcoder/transcoderScripts/restartOpenOffice.sh" >> "${tmp}" +sudo chown 0:0 "${tmp}" +sudo chmod 440 "${tmp}" +sudo mv -f "${tmp}" /etc/sudoers +echo sudoers file was edited + + diff --git 
a/localDevSetup/installDependsFromDebianFile.py b/localDevSetup/installDependsFromDebianFile.py new file mode 100755 index 0000000000..b14196e6ca --- /dev/null +++ b/localDevSetup/installDependsFromDebianFile.py @@ -0,0 +1,65 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . + +# @package Archivematica +# @subpackage archivematicaDev +# @author Joseph Perry +# @version svn: $Id$ + +#Depends: ${shlibs:Depends}, ${misc:Depends}, libapache2-mod-wsgi, python-django, python-django-doc +import os +import sys +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +from executeOrRunSubProcess import executeOrRun +excludePackages = ["sip-creation-tools", "sanitize-names"] + + +filePath = sys.argv[1] +if not os.path.isfile(filePath): + print >>sys.stderr, "File doesn't exist." 
+ exit(2) +f = open(filePath, 'r') + +line = f.readline() +while not line.startswith("Depends:"): + line = f.readline() + + + +for part in line.split(","): + part = part.strip() + if part.find("${shlibs:Depends}") != -1 or \ + part.find("${misc:Depends}") != -1: + continue + if part.startswith("archivematica"): + continue + + if part in excludePackages: + continue + + print sys.argv[1] + print "Attempting Install/Update of: ", part + command = "sudo apt-get install -y " + part + exitCode, stdOut, stdError = executeOrRun("command", command, printing=False) + if exitCode: + print "exitCode:", exitCode + print stdOut + print >>sys.stderr, stdError + #else: + #print "OK" diff --git a/localDevSetup/recreateDB.sh b/localDevSetup/recreateDB.sh new file mode 100755 index 0000000000..ea8d7e245d --- /dev/null +++ b/localDevSetup/recreateDB.sh @@ -0,0 +1,71 @@ +#!/bin/bash + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . + +# @package Archivematica +# @author Joseph Perry +# @version svn: $Id$ + +databaseName="MCP" +currentDir="$(dirname $0)" +username="demo" +password="demo" + +echo "Removing existing units" +sudo ./removeUnitsFromWatchedDirectories.py + +set -e +echo -n "Enter the DATABASE root password (Hit enter if blank):" +read dbpassword + +if [ ! 
-z "$dbpassword" ] ; then + dbpassword="-p${dbpassword}" +else + dbpassword="" +fi +#set -o verbose #echo on +pwd +currentDir="`dirname $0`" +set +e +echo "Removing the old database" +mysql -u root "${dbpassword}" --execute="DROP DATABASE IF EXISTS ${databaseName}" +echo "Removing ${username} user" +mysql -u root "${dbpassword}" --execute="DROP USER '${username}'@'localhost';" +set -e + +echo "Creating MCP database" +mysql -u root "${dbpassword}" --execute="CREATE DATABASE ${databaseName} CHARACTER SET utf8 COLLATE utf8_unicode_ci;" + +echo "Creating and populating MCP Tables" +mysql -u root "${dbpassword}" --execute="USE ${databaseName}; SOURCE $currentDir/../src/MCPServer/share/mysql;" + +echo "Creating and populating Transcoder Tables" +mysql -u root "${dbpassword}" --execute="USE ${databaseName}; SOURCE $currentDir/../src/transcoder/share/mysql;" + +echo "Creating ${username} user" +mysql -u root "${dbpassword}" --execute="CREATE USER '${username}'@'localhost' IDENTIFIED BY '${password}';" +mysql -u root "${dbpassword}" --execute="GRANT SELECT, UPDATE, INSERT, DELETE ON ${databaseName}.* TO '${username}'@'localhost';" + +echo "Creating dashboard user" +mysql -u root "${dbpassword}" --execute="USE ${databaseName}; INSERT INTO auth_user (username, email, password, is_staff, is_active, is_superuser, date_joined) VALUES ('demo', 'demo@example.com', 'sha1\$e7fc2\$6123f456bba92c67a409baf2c282398fc5f70fc9', TRUE, TRUE, TRUE, NOW() );" + +dbpassword="" + +#set +o verbose #echo off +printGreen="${databaseName} database created successfully." +echo -e "\e[6;32m${printGreen}\e[0m" diff --git a/localDevSetup/removeUnitsFromWatchedDirectories.py b/localDevSetup/removeUnitsFromWatchedDirectories.py new file mode 100755 index 0000000000..0acfd268d1 --- /dev/null +++ b/localDevSetup/removeUnitsFromWatchedDirectories.py @@ -0,0 +1,67 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. 
+# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . + +# @package Archivematica +# @subpackage DevCleanup +# @author Joseph Perry +# @version svn: $Id$ +import os +import sys +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +import databaseInterface +from databaseFunctions import insertIntoEvents + +alsoRemove = ["/var/archivematica/sharedDirectory/watchedDirectories/SIPCreation/completedTransfers/", \ + "/var/archivematica/sharedDirectory/failed/", \ + "/var/archivematica/sharedDirectory/currentlyProcessing/", \ + "/var/archivematica/sharedDirectory/rejected/"] + +def removeEverythingInDirectory(directory): + if directory[-1] != "/": + directory = "%s/" % (directory) + execute = "sudo rm -rf \"%s\"*" % (directory) + print "executing: ", execute + os.system(execute) + +def cleanWatchedDirectories(): + sql = """SELECT watchedDirectoryPath FROM WatchedDirectories;""" + c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + while row != None: + try: + directory = row[0].replace("%watchDirectoryPath%", "/var/archivematica/sharedDirectory/watchedDirectories/", 1) + removeEverythingInDirectory(directory) + except Exception as inst: + print "debug except 2" + print type(inst) # the exception instance + print inst.args # arguments stored in .args + row = c.fetchone() + sqlLock.release() + +if __name__ == '__main__': + if True: + import getpass + user = getpass.getuser() + print 
"user: ", user + if user != "root": + print "Please run as root (with sudo)" + exit (1) + cleanWatchedDirectories() + for directory in alsoRemove: + removeEverythingInDirectory(directory) diff --git a/src/MCPClient/README b/src/MCPClient/README new file mode 100644 index 0000000000..e072dd947f --- /dev/null +++ b/src/MCPClient/README @@ -0,0 +1,3 @@ +This folder contains the archivematica client. + +The archivematica client gets instructions from the MCP and performs them. diff --git a/src/MCPClient/debian/archivematica-mcp-client.install b/src/MCPClient/debian/archivematica-mcp-client.install new file mode 100644 index 0000000000..5f12421f5f --- /dev/null +++ b/src/MCPClient/debian/archivematica-mcp-client.install @@ -0,0 +1,5 @@ +etc/* /etc/archivematica/MCPClient/ +lib/* /usr/lib/archivematica/MCPClient/ +init/* /etc/init/ + + diff --git a/src/MCPClient/debian/control b/src/MCPClient/debian/control new file mode 100644 index 0000000000..cf45f18a7b --- /dev/null +++ b/src/MCPClient/debian/control @@ -0,0 +1,14 @@ +Source: archivematica-mcp-client +Section: utils +Priority: extra +Maintainer: Austin Trask +Build-Depends: debhelper (>= 7) +Standards-Version: 3.8.3 +Homepage: http://archivematica.org + +Package: archivematica-mcp-client +Architecture: any +Depends: ${shlibs:Depends}, ${misc:Depends}, logapp, gearman, python-gearman, uuid, clamav, clamav-daemon, unrar-free, p7zip-full, nfs-common, python-lxml, fits, bagit, archivematica-transcoder, md5deep, archivematica-common, python-mysqldb, python-pyicu, libxml2-utils, elasticsearch, openjdk-7-jre-headless, python-rfc6266 +Description: MCP Client for Archivematica + Enter long description + diff --git a/src/MCPClient/debian/copyright b/src/MCPClient/debian/copyright new file mode 100644 index 0000000000..3906e99c09 --- /dev/null +++ b/src/MCPClient/debian/copyright @@ -0,0 +1,37 @@ +This work was packaged for Ubuntu by: + + Austin Trask + +It was downloaded from http://archivematica.org + +Upstream Author(s): + + 
Joseph Perry + Jesus Garcia Crespo + Austin Trask + Peter Van Garderen + Evelyn McLellan + +Copyright: + + Copyright (C) 2010-2012 Artefactual Systems Inc. + +License: + + This is free software: you can redistribute it and/or modify + it under the terms of the GNU Affero General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This software is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this software. If not, see . + + +The Debian packaging is: + + Copyright (C) 2010-2012 Artefactual Systems Inc. diff --git a/src/MCPClient/debian/postinst b/src/MCPClient/debian/postinst new file mode 100755 index 0000000000..d17e95e7d5 --- /dev/null +++ b/src/MCPClient/debian/postinst @@ -0,0 +1,9 @@ +#!/bin/sh + +userID=`id -u archivematica` + +if [ "${userID}" = 333 ]; then + echo "User archivematica exists" +else + adduser --uid 333 --group --system --home /var/lib/archivematica/ archivematica +fi diff --git a/src/MCPClient/debian/rules b/src/MCPClient/debian/rules new file mode 100755 index 0000000000..917d9bf25d --- /dev/null +++ b/src/MCPClient/debian/rules @@ -0,0 +1,13 @@ +#!/usr/bin/make -f +# -*- makefile -*- +# Sample debian/rules that uses debhelper. +# This file was originally written by Joey Hess and Craig Small. +# As a special exception, when this file is copied by dh-make into a +# dh-make output file, you may use that output file without restriction. +# This special exception was added by Craig Small in version 0.37 of dh-make. + +# Uncomment this to turn on verbose mode. 
+#export DH_VERBOSE=1 + +%: + dh $@ diff --git a/src/MCPClient/etc/archivematicaClientModules b/src/MCPClient/etc/archivematicaClientModules new file mode 100644 index 0000000000..4ad06390a1 --- /dev/null +++ b/src/MCPClient/etc/archivematicaClientModules @@ -0,0 +1,97 @@ +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . + +# @package Archivematica +# @subpackage Ingest +# @author Joseph Perry +# @version svn: $Id$ + +#!!! this is a list of supported commands/tasks, not modules +#the version numbers are versions of the protocol, not versions of the programs themselves. 
+ +[supportedCommandsSpecial] +upload-qubit_v0.0 = upload-qubit +upload-contentDM_v0.0 = %clientScriptsDirectory%upload-contentDM.py +restructureDIPForContentDMUpload_v0.0 = %clientScriptsDirectory%restructureDIPForContentDMUpload.py +storeAIP_v0.0 = %clientScriptsDirectory%storeAIP.py +7z_v0.0 = /usr/bin/7z +elasticSearchIndex_v0.0 = %clientScriptsDirectory%elasticSearchIndexProcessTransfer.py +elasticSearchAIPIndex_v0.0 = %clientScriptsDirectory%elasticSearchIndexProcessAIP.py + +[supportedCommands] +echo_v0.0 = /bin/echo +FITS_v0.0 = %clientScriptsDirectory%archivematicaFITS.py +setSIPQuarantine_v0.0 = %clientScriptsDirectory%quarantineSIP.sh +setFilePermission_v0.0 = chmod +verifyMD5_v0.0 = %clientScriptsDirectory%verifyMD5.sh +archivematicaClamscan_v0.0 = %clientScriptsDirectory%archivematicaClamscan.py +sanitizeObjectNames_v0.0 = %clientScriptsDirectory%sanitizeObjectNames.py +sanitizeSIPName_v0.0 = %clientScriptsDirectory%sanitizeSIPName.py +createMETS_v0.0 = %clientScriptsDirectory%archivematicaCreateMETS.py +createMETS_v2.0 = %clientScriptsDirectory%archivematicaCreateMETS2.py +createDirectory_v0.0 = mkdir +copy_v0.0 = cp +move_v0.0 = sudo mv +moveSIP_v0.0 = %clientScriptsDirectory%archivematicaMoveSIP.py +moveTransfer_v0.0 = %clientScriptsDirectory%archivematicaMoveTransfer.py +remove_v0.0 = rm +removeWithAsterisk_v0.0 = %clientScriptsDirectory%removeWithAsterisk.sh +bagit_v0.0 = %clientScriptsDirectory%archivematicaBagWithEmptyDirectories.py +verifySIPCompliance_v0.0 = %clientScriptsDirectory%verifySIPCompliance.py +verifyTransferCompliance_v0.0 = %clientScriptsDirectory%verifyTransferCompliance.py +removeFilesWithoutPresmisMetadata_v0.0 = %clientScriptsDirectory%removeFilesWithoutPresmisMetadata.py +setDirectoryPermissionsForAppraisal_v0.0 = %clientScriptsDirectory%setDirectoryPermissionsForAppraisal.sh +backupDIP_v0.0 = %clientScriptsDirectory%backupDIP.sh +sha256deepRelative_v0.0 = %clientScriptsDirectory%sha256deepRelative.sh 
+transcoderExtractPackages_v0.0 = /usr/lib/archivematica/transcoder/transcoderExtraction.py +transcoderNormalizePreservation_v0.0 = /usr/lib/archivematica/transcoder/transcoderNormalizer.py +transcoderNormalizeAccess_v0.0 = /usr/lib/archivematica/transcoder/transcoderNormalizer.py +transcoderNormalizeThumbnails_v0.0 = /usr/lib/archivematica/transcoder/transcoderNormalizer.py +checkForSubmissionDocumenation_v0.0 = %clientScriptsDirectory%checkForSubmissionDocumenation.py +removeUnneededFiles_v0.0 = %clientScriptsDirectory%removeUnneededFiles.py +verifyPREMISChecksums_v0.0 = %clientScriptsDirectory%verifyPREMISChecksums.py +verifyBAG_v0.0 = %clientScriptsDirectory%verifyBAG.py +extractBAG_v0.0 = %clientScriptsDirectory%extractBAG.sh +restructureBAG_v0.0 = %clientScriptsDirectory%restructureBAG.sh +assignFileUUIDs_v0.0 = %clientScriptsDirectory%archivematicaAssignFileUUID.py +createEvent_v0.0 = %clientScriptsDirectory%createEvent.py +updateSizeAndChecksum_v0.0 = %clientScriptsDirectory%archivematicaUpdateSizeAndChecksum.py +copyTransfersMetadataAndLogs_v0.0 = %clientScriptsDirectory%copyTransfersMetadataAndLogs.py +verifyChecksumsInFileSecOfDspaceMETSFiles_v0.0 = %clientScriptsDirectory%verifyChecksumsInFileSecOfDspaceMETSFiles.py +moveDspaceLicenseFilesToDSpaceLicenses_v0.0 = %clientScriptsDirectory%moveDspaceLicenseFilesToDSpaceLicenses.py +identifyDspaceLicenseFiles_v0.0 = %clientScriptsDirectory%identifyDspaceLicenseFiles.py +identifyDspaceTextFiles_v0.0 = %clientScriptsDirectory%identifyDspaceTextFiles.py +identifyDspaceMETSFiles_v0.0 = %clientScriptsDirectory%identifyDspaceMETSFiles.py +moveDspaceMetsFilesToDSpaceMETS_v0.0 = %clientScriptsDirectory%moveDspaceMetsFilesToDSpaceMETS.py +checkForAccessDirectory_v0.0 = %clientScriptsDirectory%checkForAccessDirectory.py +checkForServiceDirectory_v0.0 = %clientScriptsDirectory%checkForServiceDirectory.py +archivematicaSetTransferType_v0.0 = %clientScriptsDirectory%archivematicaSetTransferType.py 
+createSIPfromTransferObjects_v0.0 = %clientScriptsDirectory%createSIPfromTransferObjects.py +checkTransferDirectoryForObjects_v0.0 = %clientScriptsDirectory%checkTransferDirectoryForObjects.py +copyTransferSubmissionDocumentation_v0.0 = %clientScriptsDirectory%copyTransferSubmissionDocumentation.py +removeEmptyDirectories_v0.0 = %clientScriptsDirectory%removeEmptyDirectories.py +loadLabelsFromCSV_v0.0 = %clientScriptsDirectory%loadLabelsFromCSV.py +removeHiddenFilesAndDirectories_v0.0 = %clientScriptsDirectory%removeHiddenFilesAndDirectories.py +restructureForCompliance_v0.0 = %clientScriptsDirectory%restructureForCompliance.py +restructureForComplianceMaildir_v0.0 = %clientScriptsDirectory%restructureForComplianceMaildir.py +verifyAndRestructureTransferBag_v0.0 = %clientScriptsDirectory%verifyAndRestructureTransferBag.py +extractBagTransfer_v0.0 = %clientScriptsDirectory%extractBagTransfer.py +identifyFilesByExtension_v0.0 = %clientScriptsDirectory%identifyFilesByExtension.py +extractMaildirAttachments_v0.0 = %clientScriptsDirectory%extractMaildirAttachments.py +archivematicaVerifyMets_v0.0 = %clientScriptsDirectory%archivematicaVerifyMets.sh +getContentdmCollectionList_v0.0 = %clientScriptsDirectory%getContentdmCollectionList.py + diff --git a/src/MCPClient/etc/clientConfig.conf b/src/MCPClient/etc/clientConfig.conf new file mode 100755 index 0000000000..86f38278bf --- /dev/null +++ b/src/MCPClient/etc/clientConfig.conf @@ -0,0 +1,35 @@ +#!/bin/bash + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . + + +# @package Archivematica +# @subpackage MCPClient +# @author Joseph Perry +# @version svn: $Id$ + +[MCPClient] +MCPArchivematicaServer = localhost:4730 +sharedDirectoryMounted = /var/archivematica/sharedDirectory/ +maxThreads = 2 +archivematicaClientModules = /etc/archivematica/MCPClient/archivematicaClientModules +clientScriptsDirectory = /usr/lib/archivematica/MCPClient/clientScripts/ +LoadSupportedCommandsSpecial = True +#numberOfTasks 0 means detect number of cores, and use that. +numberOfTasks = 0 +disableElasticsearchIndexing = False diff --git a/src/MCPClient/init.d/archivematica-mcp-clientd b/src/MCPClient/init.d/archivematica-mcp-clientd new file mode 100755 index 0000000000..a24047ebc6 --- /dev/null +++ b/src/MCPClient/init.d/archivematica-mcp-clientd @@ -0,0 +1,122 @@ +#!/bin/bash + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . 
+ +# @package Archivematica +# @subpackage Ingest +# @author Joseph Perry +# @version svn: $Id$ + +#source /etc/archivematica/archivematicaConfig.conf +set -e +pidFile="/tmp/archivematicaMCPClient.pid" +hostName=`hostname` +logFile="/var/archivematica/sharedDirectory/archivematicaMCPClient-${hostName}.log" + + +startOODaemon() { + op="`netstat -l | grep 8100`" + if [ -z "$op" ]; then + echo starting OpenOffice in listening mode on TCP port 8100. + + #I found these scripts @ http://www.oooninja.com/2008/02/batch-command-line-file-conversion-with.html + #Posted by Andrew Z at Wednesday, February 27, 2008 + #!/bin/bash + # Try to autodetect OOFFICE and OOOPYTHON. + OOFFICE=`ls /usr/bin/openoffice.org2.4 /usr/bin/ooffice /usr/lib/openoffice/program/soffice 2>/dev/null | head -n 1` + OOOPYTHON=`ls /opt/openoffice.org*/program/python /usr/bin/python 2>/dev/null | head -n 1` + if [ ! -x "$OOFFICE" ] ; then + echo "Could not auto-detect OpenOffice.org binary" + exit + fi + if [ ! -x "$OOOPYTHON" ]; then + echo "Could not auto-detect OpenOffice.org Python" + exit + fi + echo "Detected OpenOffice.org binary: $OOFFICE" + echo "Detected OpenOffice.org python: $OOOPYTHON" + # Reference: http://wiki.services.openoffice.org/wiki/Using_Python_on_Linux + # If you use the OpenOffice.org that comes with Fedora or Ubuntu, uncomment the following line: + export PYTHONPATH="/usr/lib/openoffice.org/program" + # If you want to simulate for testing that there is no X server, uncomment the next line. + unset DISPLAY + # Kill any running OpenOffice.org processes. + killall -u `whoami` -q soffice + # Download the converter script if necessary. + #test -f DocumentConverter.py || wget http://www.artofsolving.com/files/DocumentConverter.py + # Start OpenOffice.org in listening mode on TCP port 8100. + sudo $OOFFICE "-accept=socket,host=localhost,port=8100;urp;StarOffice.ServiceManager" -norestore -nofirststartwizard -nologo -headless & + # Wait a few seconds to be sure it has started. 
+ sleep 6 + fi +} + +startArchivematica() { + echo "Starting" + tmpDir="`pwd 2>/dev/null`" + #startOODaemon + cd /usr/lib/archivematica/MCPClient/ + sudo -u archivematica twistd -y /usr/lib/archivematica/MCPClient/archivematicaClient.py --pidfile "$pidFile" -l "$logFile" #--gid archivematica + sudo -u archivematica chmod 755 "$pidFile" + sudo -u archivematica chmod 755 "$logFile" + echo PID: `cat "$pidFile"` + cd "$tmpDir" + echo "Started" +} + +stopArchivematica() { + echo "Stopping" + "$0" status && kill `cat "$pidFile"` + echo "Stopped" +} + +case "$1" in + start) + startArchivematica + ;; + stop) + stopArchivematica + ;; + + reload|force-reload) + stopArchivematica + startArchivematica + ;; + + restart) + stopArchivematica + startArchivematica + ;; + + status) + #status_of_proc -p "$pidFile" /usr/bin/python /usr/bin/twistd && exit 0 || exit $? + if [ -n "`cat "$pidFile" 2>/dev/null`" ]; then + ps `cat "$pidFile"` + exit 0 + else + echo "Not running" + exit 1 + fi + ;; + + *) + echo "usage [start | stop | restart ]" + exit 1 +esac + +exit 0 diff --git a/src/MCPClient/init/archivematica-mcp-client.conf b/src/MCPClient/init/archivematica-mcp-client.conf new file mode 100644 index 0000000000..5a2c1d74df --- /dev/null +++ b/src/MCPClient/init/archivematica-mcp-client.conf @@ -0,0 +1,66 @@ +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . + +description "Archivematica MCP Client" +author "Austin Trask , Joseph Perry " + +start on (net-device-up + and local-filesystems + and runlevel [2345]) +stop on runlevel [016] + +env CONF=/etc/archivematica/MCPClient +env LOCATION=/usr/lib/archivematica/MCPClient/archivematicaClient.py + +setuid archivematica +setgid archivematica + +pre-start script + + # Check that $CONF directory exists + [ -d $CONF ] + + # Wait for Gearman service + while [ ! -f /var/run/gearman/gearmand.pid ] + do + sleep 20 + done + +end script + +script + + # Build LOGFILE path + HOSTNAME=`hostname` + LOGFILE=/tmp/archivematicaMCPClient-${HOSTNAME}.log + + # Run + $LOCATION 2>>$LOGFILE 1>&2 + + # Logapp + # LOGTIME=true + # APPENDLOG=true + # CIRCULARLOG=true + # MAXLOGSIZE=10000 # Max 4000000 + # logapp --logtime=$LOGTIME \ + # --maxlogsize=$MAXLOGSIZE \ + # --logfile="$LOGFILE" \ + # --appendlog=$APPENDLOG \ + # --circularlog=$CIRCULARLOG \ + # $LOCATION + +end script diff --git a/src/MCPClient/lib/archivematicaClient.py b/src/MCPClient/lib/archivematicaClient.py new file mode 100755 index 0000000000..601fba2b1b --- /dev/null +++ b/src/MCPClient/lib/archivematicaClient.py @@ -0,0 +1,197 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . + +# @package Archivematica +# @subpackage archivematicaClient +# @author Joseph Perry +# @version svn: $Id$ + +#~DOC~ +# +# --- This is the MCP Client--- +#It connects to the MCP server, and informs the server of the tasks it can perform. +#The server can send a command (matching one of the tasks) for the client to perform. +#The client will perform that task, and return the exit code and output to the server. +# +#For archivematica 0.9 release. Added integration with the transcoder. +#The server will send the transcoder association pk, and file uuid to run. +#The client is responsible for running the correct command on the file. + +import sys +import os +import shlex +import subprocess +import time +import threading +import string +import ConfigParser +from socket import gethostname +import transcoderNormalizer +import gearman +import threading +import cPickle +import traceback +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +from executeOrRunSubProcess import executeOrRun +import databaseInterface +from databaseFunctions import logTaskAssignedSQL +printOutputLock = threading.Lock() + +config = ConfigParser.SafeConfigParser({'MCPArchivematicaServerInterface': ""}) +config.read("/etc/archivematica/MCPClient/clientConfig.conf") + +replacementDic = { + "%sharedPath%":config.get('MCPClient', "sharedDirectoryMounted"), \ + "%clientScriptsDirectory%":config.get('MCPClient', "clientScriptsDirectory") +} +supportedModules = {} + +def loadSupportedModulesSupport(key, value): + for key2, value2 in replacementDic.iteritems(): + value = value.replace(key2, value2) + if not os.path.isfile(value): + print >>sys.stderr, "Warning - Module can't find file, or relies on system path:{%s}%s" % (key.__str__(), value.__str__()) + supportedModules[key] = value + " " + +def loadSupportedModules(file): + supportedModulesConfig = ConfigParser.RawConfigParser() 
+ supportedModulesConfig.read(file) + for key, value in supportedModulesConfig.items('supportedCommands'): + loadSupportedModulesSupport(key, value) + + loadSupportedCommandsSpecial = config.get('MCPClient', "LoadSupportedCommandsSpecial") + if loadSupportedCommandsSpecial.lower() == "yes" or \ + loadSupportedCommandsSpecial.lower() == "true": + for key, value in supportedModulesConfig.items('supportedCommandsSpecial'): + loadSupportedModulesSupport(key, value) + + +def executeCommand(gearman_worker, gearman_job): + try: + execute = gearman_job.task + print "executing:", execute, "{", gearman_job.unique, "}" + data = cPickle.loads(gearman_job.data) + utcDate = databaseInterface.getUTCDate() + arguments = data["arguments"]#.encode("utf-8") + if isinstance(arguments, unicode): + arguments = arguments.encode("utf-8") + #if isinstance(arguments, str): + # arguments = unicode(arguments) + + sInput = "" + clientID = gearman_worker.worker_client_id + + #if True: + # print clientID, execute, data + logTaskAssignedSQL(gearman_job.unique.__str__(), clientID, utcDate) + + if execute not in supportedModules: + output = ["Error!", "Error! - Tried to run and unsupported command." 
] + exitCode = -1 + return cPickle.dumps({"exitCode" : exitCode, "stdOut": output[0], "stdError": output[1]}) + command = supportedModules[execute] + + + replacementDic["%date%"] = utcDate + replacementDic["%jobCreatedDate%"] = data["createdDate"] + #Replace replacement strings + for key in replacementDic.iterkeys(): + command = command.replace ( key, replacementDic[key] ) + arguments = arguments.replace ( key, replacementDic[key] ) + + key = "%taskUUID%" + value = gearman_job.unique.__str__() + arguments = arguments.replace(key, value) + + #execute command + + command += " " + arguments + printOutputLock.acquire() + print >>sys.stderr, "{" + gearman_job.unique + "}" + command.__str__() + "" + printOutputLock.release() + exitCode, stdOut, stdError = executeOrRun("command", command, sInput, printing=False) + return cPickle.dumps({"exitCode" : exitCode, "stdOut": stdOut, "stdError": stdError}) + #catch OS errors + except OSError, ose: + traceback.print_exc(file=sys.stdout) + printOutputLock.acquire() + print >>sys.stderr, "Execution failed:", ose + printOutputLock.release() + output = ["Config Error!", ose.__str__() ] + exitCode = 1 + return cPickle.dumps({"exitCode" : exitCode, "stdOut": output[0], "stdError": output[1]}) + except: + traceback.print_exc(file=sys.stdout) + printOutputLock.acquire() + print sys.exc_info().__str__() + print "Unexpected error:", sys.exc_info()[0] + printOutputLock.release() + output = ["", sys.exc_info().__str__()] + return cPickle.dumps({"exitCode" : -1, "stdOut": output[0], "stdError": output[1]}) + + +def startThread(threadNumber): + gm_worker = gearman.GearmanWorker([config.get('MCPClient', "MCPArchivematicaServer")]) + hostID = gethostname() + "_" + threadNumber.__str__() + gm_worker.set_client_id(hostID) + for key in supportedModules.iterkeys(): + printOutputLock.acquire() + print "registering:", '"' + key + '"' + printOutputLock.release() + gm_worker.register_task(key, executeCommand) + + #load transoder jobs + sql = """SELECT 
CommandRelationships.pk FROM CommandRelationships JOIN Commands on CommandRelationships.command = Commands.pk WHERE supportedBy = 1;""" + rows = databaseInterface.queryAllSQL(sql) + if rows: + for row in rows: + CommandRelationshipsPK = row[0] + key = "transcoder_cr%d" % (CommandRelationshipsPK) + printOutputLock.acquire() + print "registering:", '"' + key + '"' + printOutputLock.release() + gm_worker.register_task(key, transcoderNormalizer.executeCommandReleationship) + gm_worker.work() + + +def flushOutputs(): + while True: + sys.stdout.flush() + sys.stderr.flush() + time.sleep(5) + +def startThreads(t=1): + if True: + t2 = threading.Thread(target=flushOutputs) + t2.daemon = True + t2.start() + if t == 0: + from externals.detectCores import detectCPUs + t = detectCPUs() + for i in range(t): + t = threading.Thread(target=startThread, args=(i+1, )) + t.daemon = True + t.start() + +if __name__ == '__main__': + loadSupportedModules(config.get('MCPClient', "archivematicaClientModules")) + startThreads(config.getint('MCPClient', "numberOfTasks")) + tl = threading.Lock() + tl.acquire() + tl.acquire() diff --git a/src/MCPClient/lib/clientScripts/archivematicaAssignFileUUID.py b/src/MCPClient/lib/clientScripts/archivematicaAssignFileUUID.py new file mode 100755 index 0000000000..bbf1707d1e --- /dev/null +++ b/src/MCPClient/lib/clientScripts/archivematicaAssignFileUUID.py @@ -0,0 +1,64 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . + +# @package Archivematica +# @subpackage archivematicaClientScript +# @author Joseph Perry +# @version svn: $Id$ +import sys +import uuid +from optparse import OptionParser +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +from fileOperations import addFileToTransfer +from fileOperations import addFileToSIP + + +if __name__ == '__main__': + parser = OptionParser() + parser.add_option("-i", "--fileUUID", action="store", dest="fileUUID", default="") + parser.add_option("-p", "--filePath", action="store", dest="filePath", default="") + parser.add_option("-d", "--date", action="store", dest="date", default="") + parser.add_option("-u", "--eventIdentifierUUID", action="store", dest="eventIdentifierUUID", default="") + parser.add_option("-s", "--sipDirectory", action="store", dest="sipDirectory", default="") + parser.add_option("-S", "--sipUUID", action="store", dest="sipUUID", default="") + parser.add_option("-T", "--transferUUID", action="store", dest="transferUUID", default="") + parser.add_option("-e", "--use", action="store", dest="use", default="original") + + + (opts, args) = parser.parse_args() + opts2 = vars(opts) +# for key, value in opts2.iteritems(): +# print type(key), key, type(value), value +# exec 'opts.' 
+ key + ' = value.decode("utf-8")' + fileUUID = opts.fileUUID + if not fileUUID or fileUUID == "None": + fileUUID = uuid.uuid4().__str__() + + + if opts.sipUUID == "" and opts.transferUUID != "": + filePathRelativeToSIP = opts.filePath.replace(opts.sipDirectory,"%transferDirectory%", 1) + addFileToTransfer(filePathRelativeToSIP, fileUUID, opts.transferUUID, opts.eventIdentifierUUID, opts.date, use=opts.use) + + elif opts.sipUUID != "" and opts.transferUUID == "": + filePathRelativeToSIP = opts.filePath.replace(opts.sipDirectory,"%SIPDirectory%", 1) + addFileToSIP(filePathRelativeToSIP, fileUUID, opts.sipUUID, opts.eventIdentifierUUID, opts.date, use=opts.use) + + else: + print >>sys.stderr, "SIP exclusive-or Transfer uuid must be defined" + exit(2) diff --git a/src/MCPClient/lib/clientScripts/archivematicaBagWithEmptyDirectories.py b/src/MCPClient/lib/clientScripts/archivematicaBagWithEmptyDirectories.py new file mode 100755 index 0000000000..747f746d2d --- /dev/null +++ b/src/MCPClient/lib/clientScripts/archivematicaBagWithEmptyDirectories.py @@ -0,0 +1,75 @@ +#!/usr/bin/python -OO +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . 
+ +# @package Archivematica +# @subpackage archivematicaClientScript +# @author Joseph Perry +# @version svn: $Id$ + +import os +import sys +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +from executeOrRunSubProcess import executeOrRun + + + +def runBag(arguments): + command = "/usr/share/bagit/bin/bag %s" % (arguments) + exitCode, stdOut, stdError = executeOrRun("command", command, printing=False) + if exitCode != 0: + print >>sys.stderr, "" + print >>sys.stderr, "Error with command: ", command + print >>sys.stderr, "Standard OUT:" + print >>sys.stderr, stdOut + print >>sys.stderr, "Standard Error:" + print >>sys.stderr, stdError + exit(exitCode) + else: + print stdOut + print >>sys.stderr, stdError + +def getListOfDirectories(dir): + ret = [] + for dir2, subDirs, files in os.walk(dir): + for subDir in subDirs: + p = os.path.join(dir2, subDir).replace(dir + "/", "", 1) + ret.append(p) + ret.append(dir2.replace(dir + "/", "", 1)) + print "directory list:" + for dir in ret: + print "\t", dir + return ret + +def createDirectoriesAsNeeded(baseDir, dirList): + for dir in dirList: + directory = os.path.join(baseDir, dir) + if not os.path.isdir(directory): + try: + os.makedirs(directory) + except: + continue + +if __name__ == '__main__': + dest = sys.argv[2] + SIPDir = os.path.dirname(dest) + dirList = getListOfDirectories(SIPDir) + arguments = "" + for s in sys.argv[1:]: + arguments = "%s \"%s\"" % (arguments, s) + runBag(arguments) + createDirectoriesAsNeeded(os.path.join(dest, "data"), dirList) diff --git a/src/MCPClient/lib/clientScripts/archivematicaCheckMD5NoGUI.sh b/src/MCPClient/lib/clientScripts/archivematicaCheckMD5NoGUI.sh new file mode 100755 index 0000000000..ab595d8f9b --- /dev/null +++ b/src/MCPClient/lib/clientScripts/archivematicaCheckMD5NoGUI.sh @@ -0,0 +1,69 @@ +#!/bin/bash +# +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. 
+# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . + +# @package Archivematica +# @subpackage archivematicaClientScript +# @author Joseph Perry +# @version svn: $Id$ + +#create temp report files +UUID=`uuid` +failTmp=/tmp/fail-$UUID +passTmp=/tmp/pass-$UUID +reportTmp=/tmp/report-$UUID + +checkFolder="$1" +md5Digest="$2" +integrityReport="$3" +checksumTool="$4" + +tmpDir=`pwd` +cd "$checkFolder" +#check for passing checksums +"${checksumTool}" -r -m "$md5Digest" . > $passTmp +#check for failing checksums +"${checksumTool}" -r -n -m "$md5Digest" . 
> $failTmp +cd $tmpDir + + + +#Count number of Passed/Failed +numberPass=`wc -l $passTmp| cut -d" " -f1` +numberFail=`wc -l $failTmp| cut -d" " -f1` + +#Create report +echo "PASSED" >> $reportTmp +cat $passTmp >> $reportTmp +echo " " >> $reportTmp +echo $numberPass "items passed integrity checking" >> $reportTmp +echo " " >> $reportTmp +echo " " >> $reportTmp +echo "FAILED" >> $reportTmp +cat $failTmp >> $reportTmp +echo " " >> $reportTmp +echo $numberFail "items failed integrity checking" >> $reportTmp + +#copy pasta +cp $reportTmp "$integrityReport" +cat $failTmp 1>&2 + +#cleanup +rm $failTmp $passTmp $reportTmp + +exit $numberFail diff --git a/src/MCPClient/lib/clientScripts/archivematicaClamscan.py b/src/MCPClient/lib/clientScripts/archivematicaClamscan.py new file mode 100755 index 0000000000..416faff6e7 --- /dev/null +++ b/src/MCPClient/lib/clientScripts/archivematicaClamscan.py @@ -0,0 +1,68 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . 
+ +# @package Archivematica +# @subpackage archivematicaClientScript +# @author Joseph Perry +# @version svn: $Id$ + +#source /etc/archivematica/archivematicaConfig.conf +import os +import sys +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +from executeOrRunSubProcess import executeOrRun +from databaseFunctions import insertIntoEvents +from archivematicaFunctions import escapeForCommand + +clamscanResultShouldBe="Infected files: 0" + +if __name__ == '__main__': + fileUUID = sys.argv[1] + target = sys.argv[2] + date = sys.argv[3] + taskUUID = sys.argv[4] + + command = 'clamdscan - <"' + escapeForCommand(target) + '"' + print >>sys.stderr, command + commandVersion = "clamdscan -V" + eventOutcome = "Pass" + + clamscanOutput = executeOrRun("bashScript", command, printing=False) + clamscanVersionOutput = executeOrRun("command", commandVersion, printing=False) + + if clamscanOutput[0] or clamscanVersionOutput[0]: + if clamscanVersionOutput[0]: + print >>sys.stderr, clamscanVersionOutput + exit(2) + else: + eventOutcome = "Fail" + + if eventOutcome == "Fail" or clamscanOutput[1].find(clamscanResultShouldBe) == -1: + eventOutcome = "Fail" + print >>sys.stderr, fileUUID, " - ", os.path.basename(target) + print >>sys.stderr, clamscanOutput + + version, virusDefs, virusDefsDate = clamscanVersionOutput[1].split("/") + virusDefs = virusDefs + "/" + virusDefsDate + eventDetailText = "program=\"Clam AV\"; version=\"" + version + "\"; virusDefinitions=\"" + virusDefs + "\"" + + if fileUUID != "None": + insertIntoEvents(fileUUID=fileUUID, eventIdentifierUUID=taskUUID, eventType="virus check", eventDateTime=date, eventDetail=eventDetailText, eventOutcome=eventOutcome, eventOutcomeDetailNote="") + if eventOutcome != "Pass": + exit(3) diff --git a/src/MCPClient/lib/clientScripts/archivematicaCreateMETS.py b/src/MCPClient/lib/clientScripts/archivematicaCreateMETS.py new file mode 100755 index 0000000000..1b429653a1 --- /dev/null +++ 
b/src/MCPClient/lib/clientScripts/archivematicaCreateMETS.py @@ -0,0 +1,199 @@ +#!/usr/bin/python -OO +# +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . + +# @package Archivematica +# @subpackage archivematicaClientScript +# @author Joseph Perry +# @version svn: $Id$ + +from archivematicaXMLNamesSpace import * + +import os +import uuid +import sys +import lxml.etree as etree +import string +import MySQLdb +from xml.sax.saxutils import quoteattr +from datetime import datetime +from createXmlEventsAssist import createArchivematicaAgent +from createXmlEventsAssist import createOrganizationAgent +#from archivematicaCreateMETS2 import escape +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +import databaseInterface +#from archivematicaFunctions import escape + + +UUIDsDic={} +amdSec=[] + +from optparse import OptionParser +parser = OptionParser() +parser.add_option("-s", "--basePath", action="store", dest="basePath", default="") +parser.add_option("-b", "--basePathString", action="store", dest="basePathString", default="SIPDirectory") #transferDirectory +parser.add_option("-f", "--fileGroupIdentifier", action="store", dest="fileGroupIdentifier", default="sipUUID") #transferUUID +parser.add_option("-S", "--sipUUID", action="store", dest="sipUUID", default="") +parser.add_option("-x", "--xmlFile", 
action="store", dest="xmlFile", default="")
+parser.add_option("-a", "--amdSec", action="store_true", dest="amdSec", default=False)
+(opts, args) = parser.parse_args()
+print opts
+
+
+SIPUUID = opts.sipUUID
+basePath = opts.basePath
+XMLFile = opts.xmlFile
+includeAmdSec = opts.amdSec
+# e.g. "SIPDirectory" -> "%SIPDirectory%", the placeholder form stored in the
+# database's file locations.
+basePathString = "%%%s%%" % (opts.basePathString)
+fileGroupIdentifier = opts.fileGroupIdentifier
+
+# NOTE(review): despite the name, this does not escape anything; it only
+# decodes a UTF-8 byte string to unicode.
+def escape(string):
+    string = string.decode('utf-8')
+    return string
+
+
+# Append a new element <tag> to parent and set its text; returns the child.
+# NOTE(review): the tailText parameter is accepted but never used here.
+def newChild(parent, tag, text=None, tailText=None):
+    child = etree.Element(tag)
+    parent.append(child)
+    child.text = text
+    return child
+
+
+
+#Do /SIP-UUID/
+#Force only /SIP-UUID/objects
+doneFirstRun = False
+# Recursively build the METS fileSec entries and the physical structMap for
+# `path`. On the very first call (path == basePath) it only recurses into
+# objects/; subsequent recursive calls list directory contents.
+# NOTE(review): doneFirstRun is rebound locally (no `global` statement),
+# shadowing the module-level flag; the recursion relies on the local copy,
+# and the module-level variable is never actually updated.
+def createFileSec(path, parentBranch, structMapParent):
+    print >>sys.stderr, "createFileSec: ", path, parentBranch, structMapParent
+    doneFirstRun = True
+    pathSTR = path.__str__()
+    # NOTE(review): duplicate assignment; harmless but redundant.
+    pathSTR = path.__str__()
+    if pathSTR == basePath + "objects/": #IF it's it's the SIP folder, it's OBJECTS
+        pathSTR = "objects"
+    #pathSTR = string.replace(path.__str__(), "/tmp/" + sys.argv[2] + "/" + sys.argv[3], "objects", 1)
+    #if pathSTR + "/" == basePath: #if it's the very first run through (recursive function)
+    if path == basePath: #if it's the very first run through (recursive function)
+        pathSTR = os.path.basename(os.path.dirname(basePath))
+        #structMapParent.set("DMDID", "SIP-description")
+
+        #currentBranch = newChild(parentBranch, "fileGrp")
+        #currentBranch.set("USE", "directory")
+        # structMap directory
+        div = newChild(structMapParent, "div")
+        createFileSec(os.path.join(path, "objects/"), parentBranch, div)
+        doneFirstRun = False
+    filename = os.path.basename(pathSTR)
+
+    structMapParent.set("TYPE", "directory")
+    structMapParent.set("LABEL", escape(filename))
+
+
+    if doneFirstRun:
+        # Two passes over the sorted listing: files first, then directories.
+        for doDirectories in [False, True]:
+            print "path", type(path), path
+            directoryContents = os.listdir(path)
+            directoryContents.sort()
+            for item in directoryContents:
+                print "item", type(item), item
+                itempath =
os.path.join(path, item) + if os.path.isdir(itempath): + if not doDirectories: + continue + #currentBranch = newChild(parentBranch, "fileGrp") + #currentBranch.set("USE", "directory") + # structMap directory + div = newChild(structMapParent, "div") + + createFileSec(os.path.join(path, item), parentBranch, div) + elif os.path.isfile(itempath): + if doDirectories: + continue + #myuuid = uuid.uuid4() + myuuid="" + #pathSTR = itempath.replace(basePath + "objects", "objects", 1) + pathSTR = itempath.replace(basePath, basePathString, 1) + + print "pathSTR", type(pathSTR), pathSTR + + sql = """SELECT fileUUID FROM Files WHERE removedTime = 0 AND %s = '%s' AND Files.currentLocation = '%s';""" % (fileGroupIdentifier, SIPUUID, MySQLdb.escape_string(pathSTR)) + c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + if row == None: + print >>sys.stderr, "No uuid for file: \"", pathSTR, "\"" + while row != None: + myuuid = row[0] + row = c.fetchone() + sqlLock.release() + + if includeAmdSec: + createDigiprovMD(myuuid, itempath, myuuid) + + pathSTR = itempath.replace(basePath, "", 1) + + fileI = etree.SubElement( parentBranch, "file") + + filename = ''.join(quoteattr(item).split("\"")[1:-1]) + #filename = replace /tmp/"UUID" with /objects/ + + ID = "file-" + myuuid.__str__() + fileI.set("ID", escape(ID)) + if includeAmdSec: + fileI.set("ADMID", "digiprov-" + item.__str__() + "-" + myuuid.__str__()) + + Flocat = newChild(fileI, "FLocat") + Flocat.set(xlinkBNS + "href", escape(pathSTR) ) + Flocat.set("LOCTYPE", "OTHER") + Flocat.set("OTHERLOCTYPE", "SYSTEM") + + # structMap file + #div = newChild(structMapParent, "div") + fptr = newChild(structMapParent, "fptr") + FILEID = "file-" + myuuid.__str__() + fptr.set("FILEID", escape(FILEID)) + +if __name__ == '__main__': + root = etree.Element( "mets", \ + nsmap = {None: metsNS, "xlink": xlinkNS}, \ + attrib = { "{" + xsiNS + "}schemaLocation" : "http://www.loc.gov/METS/ http://www.loc.gov/standards/mets/mets.xsd" } ) + + 
#cd /tmp/$UUID; + opath = os.getcwd() + os.chdir(basePath) + path = basePath + + #if includeAmdSec: + # amdSec = newChild(root, "amdSec") + + fileSec = etree.Element("fileSec") + #fileSec.tail = "\n" + root.append(fileSec) + + sipFileGrp = etree.SubElement(fileSec, "fileGrp") + sipFileGrp.set("USE", "original") + + structMap = newChild(root, "structMap") + structMap.set("TYPE", "physical") + structMapDiv = newChild(structMap, "div") + + createFileSec(path, sipFileGrp, structMapDiv) + + tree = etree.ElementTree(root) + tree.write(XMLFile, pretty_print=True, xml_declaration=True) + + # Restore original path + os.chdir(opath) diff --git a/src/MCPClient/lib/clientScripts/archivematicaCreateMETS2.py b/src/MCPClient/lib/clientScripts/archivematicaCreateMETS2.py new file mode 100755 index 0000000000..cce528050c --- /dev/null +++ b/src/MCPClient/lib/clientScripts/archivematicaCreateMETS2.py @@ -0,0 +1,727 @@ +#!/usr/bin/python -OO +# -*- coding: utf-8 -*- +# +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . 
+ +# @package Archivematica +# @subpackage archivematicaClientScript +# @author Joseph Perry +# @version svn: $Id$ +from archivematicaXMLNamesSpace import * +import lxml.etree as etree +from xml.sax.saxutils import quoteattr +import os +import sys +import MySQLdb +import PyICU +import traceback +from archivematicaCreateMETSRights import archivematicaGetRights +from archivematicaCreateMETSRightsDspaceMDRef import archivematicaCreateMETSRightsDspaceMDRef +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +import databaseInterface +from archivematicaFunctions import escape +from archivematicaFunctions import unicodeToStr +from archivematicaFunctions import strToUnicode +from sharedVariablesAcrossModules import sharedVariablesAcrossModules +sharedVariablesAcrossModules.globalErrorCount = 0 + +from optparse import OptionParser +parser = OptionParser() +parser.add_option("-s", "--baseDirectoryPath", action="store", dest="baseDirectoryPath", default="") +parser.add_option("-b", "--baseDirectoryPathString", action="store", dest="baseDirectoryPathString", default="SIPDirectory") #transferDirectory/ +parser.add_option("-f", "--fileGroupIdentifier", action="store", dest="fileGroupIdentifier", default="") #transferUUID/sipUUID +parser.add_option("-t", "--fileGroupType", action="store", dest="fileGroupType", default="sipUUID") # +parser.add_option("-x", "--xmlFile", action="store", dest="xmlFile", default="") +parser.add_option("-a", "--amdSec", action="store_true", dest="amdSec", default=False) +parser.add_option("-i", "--PyICULocale", action="store", dest="PyICULocale", default='pl_PL.UTF-8') +(opts, args) = parser.parse_args() + + +baseDirectoryPath = opts.baseDirectoryPath +XMLFile = opts.xmlFile +includeAmdSec = opts.amdSec +baseDirectoryPathString = "%%%s%%" % (opts.baseDirectoryPathString) +fileGroupIdentifier = opts.fileGroupIdentifier +fileGroupType = opts.fileGroupType +includeAmdSec = opts.amdSec + +#Global Variables + +globalFileGrps = {} 
+# Pre-create one METS <fileGrp USE="..."> element per recognised use; files
+# are appended to these groups as they are processed.
+globalFileGrpsUses = ["original", "submissionDocumentation", "preservation", "service", "access", "license", "text/ocr"]
+for use in globalFileGrpsUses:
+    grp = etree.Element("fileGrp")
+    grp.set("USE", use)
+    globalFileGrps[use] = grp
+
+##counters
+# NOTE(review): `global` at module level is a no-op in Python; these
+# statements only document intent -- the names are module globals regardless.
+global amdSecs
+amdSecs = []
+global dmdSecs
+dmdSecs = []
+global globalDmdSecCounter
+globalDmdSecCounter = 0
+global globalAmdSecCounter
+globalAmdSecCounter = 0
+global globalTechMDCounter
+globalTechMDCounter = 0
+global globalRightsMDCounter
+globalRightsMDCounter = 0
+global globalDigiprovMDCounter
+globalDigiprovMDCounter = 0
+global fileNameToFileID #Used for mapping structMaps included with transfer
+fileNameToFileID = {}
+
+
+
+#GROUPID="G1" -> GROUPID="Group-%object's UUID%"
+##group of the object and it's related access, license
+
+#move to common
+# Append <tag> to parent, set its text/tail, and apply the (key, value)
+# attribute pairs in `sets`; returns the new child element.
+# NOTE(review): the mutable default `sets=[]` is shared across calls; safe
+# only as long as no caller mutates it.
+def newChild(parent, tag, text=None, tailText=None, sets=[]):
+    child = etree.Element(tag)
+    parent.append(child)
+    child.text = strToUnicode(text)
+    if tailText:
+        child.tail = strToUnicode(tailText)
+    for set in sets:
+        key, value = set
+        child.set(key, value)
+    return child
+
+# Build a PREMIS <agent> element from an identifier type/value, name and type.
+def createAgent(agentIdentifierType, agentIdentifierValue, agentName, agentType):
+    ret = etree.Element("agent")
+    agentIdentifier = etree.SubElement( ret, "agentIdentifier")
+    etree.SubElement( agentIdentifier, "agentIdentifierType").text = agentIdentifierType
+    etree.SubElement( agentIdentifier, "agentIdentifierValue").text = agentIdentifierValue
+    etree.SubElement( ret, "agentName").text = agentName
+    etree.SubElement( ret, "agentType").text = agentType
+    return ret
+
+
+# Values of Dublincore.metadataAppliesToType in the MCP database.
+SIPMetadataAppliesToType = 1
+TransferMetadataAppliesToType = 2
+FileMetadataAppliesToType = 3
+# Fetch the Dublin Core record applying to (type_, id) and return it rendered
+# as a <dublincore> element (None when no row exists).
+# NOTE(review): SQL is built by string interpolation; the id is an internally
+# generated identifier here, but parameterised queries would be safer.
+def getDublinCore(type_, id):
+    sql = """SELECT title, creator, subject, description, publisher, contributor, date, type, format, identifier, source, relation, language, coverage, rights
+    FROM Dublincore WHERE metadataAppliesToType = %s AND metadataAppliesToidentifier = '%s';""" % \
+    (type_.__str__(), id.__str__())
+    c,
sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + if row == None: + sqlLock.release() + return None + ret = etree.Element( "dublincore", nsmap = {None: dcNS, "dcterms": dctermsNS} ) + ret.set(xsiBNS+"schemaLocation", dcNS + " http://dublincore.org/schemas/xmls/qdc/dc.xsd " + dctermsNS + " http://dublincore.org/schemas/xmls/qdc/2008/02/11/dcterms.xsd") + dctermsElements= ["isPartOf"] + while row != None: + key = ["title", "creator", "subject", "description", "publisher", "contributor", "date", "type", "format", "identifier", "source", "relation", "language", "coverage", "rights"] + #title, creator, subject, description, publisher, contributor, date, type, format, identifier, source, relation, language, coverage, rights = row + #key.index("title") == title + i = 0 + for term in key: + if row[i] != None: + txt = row[i] + else: + txt = "" + if term in dctermsElements: + etree.SubElement(ret, dctermsBNS + term).text = txt + else: + newChild(ret, term, text=txt) + i+=1 + + row = c.fetchone() + sqlLock.release() + return ret + +def createDublincoreDMDSec(type, id): + dc = getDublinCore(type, id) + if dc == None: + transfers = os.path.join(baseDirectoryPath, "metadata/transfers/") + for transfer in os.listdir(transfers): + dcXMLFile = os.path.join(transfers, transfer, "metadata/dublincore.xml") + if os.path.isfile(dcXMLFile): + try: + parser = etree.XMLParser(remove_blank_text=True) + dtree = etree.parse(dcXMLFile, parser) + dc = dtree.getroot() + except Exception as inst: + print >>sys.stderr, "error parsing file:", dcXMLFile + print >>sys.stderr, type(inst) # the exception instance + print >>sys.stderr, inst.args + traceback.print_exc(file=sys.stdout) + sharedVariablesAcrossModules.globalErrorCount += 1 + return None + else: + return None + + global globalDmdSecCounter + globalDmdSecCounter += 1 + dmdSec = etree.Element("dmdSec") + ID = "dmdSec_" + globalDmdSecCounter.__str__() + dmdSec.set("ID", ID) + mdWrap = newChild(dmdSec, "mdWrap") + 
mdWrap.set("MDTYPE", "DC") + xmlData = newChild(mdWrap, "xmlData") + xmlData.append(dc) + return (dmdSec, ID) + +def createMDRefDMDSec(LABEL, itemdirectoryPath, directoryPathSTR): + global globalDmdSecCounter + globalDmdSecCounter += 1 + dmdSec = etree.Element("dmdSec") + ID = "dmdSec_" + globalDmdSecCounter.__str__() + dmdSec.set("ID", ID) + XPTR = "xpointer(id(" + tree = etree.parse(itemdirectoryPath) + root = tree.getroot() + for item in root.findall("{http://www.loc.gov/METS/}dmdSec"): + XPTR = "%s %s" % (XPTR, item.get("ID")) + XPTR = XPTR.replace(" ", "'", 1) + "'))" + newChild(dmdSec, "mdRef", text=None, sets=[("LABEL", LABEL), (xlinkBNS +"href", directoryPathSTR), ("MDTYPE", "OTHER"), ("LOCTYPE","OTHER"), ("OTHERLOCTYPE", "SYSTEM"), ("XPTR", XPTR)]) + return (dmdSec, ID) + + +def createTechMD(fileUUID): + ret = etree.Element("techMD") + techMD = ret #newChild(amdSec, "digiprovMD") + #digiprovMD.set("ID", "digiprov-"+ os.path.basename(filename) + "-" + fileUUID) + global globalTechMDCounter + globalTechMDCounter += 1 + techMD.set("ID", "techMD_"+ globalTechMDCounter.__str__()) + + mdWrap = newChild(techMD,"mdWrap") + mdWrap.set("MDTYPE", "PREMIS:OBJECT") + xmlData = newChild(mdWrap, "xmlData") + #premis = etree.SubElement( xmlData, "premis", nsmap={None: premisNS}, \ + # attrib = { "{" + xsiNS + "}schemaLocation" : "info:lc/xmlns/premis-v2 http://www.loc.gov/standards/premis/premis.xsd" }) + #premis.set("version", "2.0") + + #premis = etree.SubElement( xmlData, "premis", attrib = {xsiBNS+"type": "premis:file"}) + + sql = "SELECT fileSize, checksum FROM Files WHERE fileUUID = '%s';" % (fileUUID) + c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + while row != None: + fileSize = row[0].__str__() + checksum = row[1].__str__() + row = c.fetchone() + sqlLock.release() + + #OBJECT + object = etree.SubElement(xmlData, "object", nsmap={None: premisNS}) + object.set( xsiBNS+"type", "file") + object.set(xsiBNS+"schemaLocation", premisNS + " 
http://www.loc.gov/standards/premis/v2/premis-v2-2.xsd") + object.set("version", "2.2") + + objectIdentifier = etree.SubElement(object, "objectIdentifier") + etree.SubElement(objectIdentifier, "objectIdentifierType").text = "UUID" + etree.SubElement(objectIdentifier, "objectIdentifierValue").text = fileUUID + + #etree.SubElement(object, "objectCategory").text = "file" + + objectCharacteristics = etree.SubElement(object, "objectCharacteristics") + etree.SubElement(objectCharacteristics, "compositionLevel").text = "0" + + fixity = etree.SubElement(objectCharacteristics, "fixity") + etree.SubElement(fixity, "messageDigestAlgorithm").text = "sha256" + etree.SubElement(fixity, "messageDigest").text = checksum + + etree.SubElement(objectCharacteristics, "size").text = fileSize + + sql = "SELECT formatName, formatVersion, formatRegistryName, formatRegistryKey FROM FilesIDs WHERE fileUUID = '%s';" % (fileUUID) + c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + if not row: + format = etree.SubElement(objectCharacteristics, "format") + formatDesignation = etree.SubElement(format, "formatDesignation") + etree.SubElement(formatDesignation, "formatName").text = "Unknown" + while row != None: + #print row + format = etree.SubElement(objectCharacteristics, "format") + #fileUUID = row[0] + + formatDesignation = etree.SubElement(format, "formatDesignation") + etree.SubElement(formatDesignation, "formatName").text = row[0] + etree.SubElement(formatDesignation, "formatVersion").text = row[1] + + formatRegistry = etree.SubElement(format, "formatRegistry") + etree.SubElement(formatRegistry, "formatRegistryName").text = row[2] + etree.SubElement(formatRegistry, "formatRegistryKey").text = row[3] + row = c.fetchone() + sqlLock.release() + + objectCharacteristicsExtension = etree.SubElement(objectCharacteristics, "objectCharacteristicsExtension") + + sql = "SELECT FilesFits.FITSxml FROM FilesFits WHERE fileUUID = '" + fileUUID + "';" + c, sqlLock = 
databaseInterface.querySQL(sql) + row = c.fetchone() + #if not row: + # print >>sys.stderr, "Error no fits.", fileUUID + parser = etree.XMLParser(remove_blank_text=True) + while row != None: + #fits = etree.fromstring(row[0]) + fits = etree.XML(row[0], parser) + objectCharacteristicsExtension.append(fits) + row = c.fetchone() + sqlLock.release() + + sql = "SELECT Files.originalLocation FROM Files WHERE Files.fileUUID = '" + fileUUID + "';" + c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + if not row: + print >>sys.stderr, "Error no fits." + while row != None: + etree.SubElement(object, "originalName").text = escape(row[0]) + row = c.fetchone() + sqlLock.release() + + #Derivations + sql = "SELECT sourceFileUUID, derivedFileUUID, relatedEventUUID FROM Derivations WHERE sourceFileUUID = '" + fileUUID + "';" + c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + while row != None: + relationship = etree.SubElement(object, "relationship") + etree.SubElement(relationship, "relationshipType").text = "derivation" + etree.SubElement(relationship, "relationshipSubType").text = "is source of" + + relatedObjectIdentification = etree.SubElement(relationship, "relatedObjectIdentification") + etree.SubElement(relatedObjectIdentification, "relatedObjectIdentifierType").text = "UUID" + etree.SubElement(relatedObjectIdentification, "relatedObjectIdentifierValue").text = row[1] + + relatedEventIdentification = etree.SubElement(relationship, "relatedEventIdentification") + etree.SubElement(relatedEventIdentification, "relatedEventIdentifierType").text = "UUID" + etree.SubElement(relatedEventIdentification, "relatedEventIdentifierValue").text = row[2] + + row = c.fetchone() + sqlLock.release() + + sql = "SELECT sourceFileUUID, derivedFileUUID, relatedEventUUID FROM Derivations WHERE derivedFileUUID = '" + fileUUID + "';" + c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + while row != None: + relationship = etree.SubElement(object, 
"relationship") + etree.SubElement(relationship, "relationshipType").text = "derivation" + etree.SubElement(relationship, "relationshipSubType").text = "has source" + + relatedObjectIdentification = etree.SubElement(relationship, "relatedObjectIdentification") + etree.SubElement(relatedObjectIdentification, "relatedObjectIdentifierType").text = "UUID" + etree.SubElement(relatedObjectIdentification, "relatedObjectIdentifierValue").text = row[0] + + relatedEventIdentification = etree.SubElement(relationship, "relatedEventIdentification") + etree.SubElement(relatedEventIdentification, "relatedEventIdentifierType").text = "UUID" + etree.SubElement(relatedEventIdentification, "relatedEventIdentifierValue").text = row[2] + + row = c.fetchone() + sqlLock.release() + return ret + +def createDigiprovMD(fileUUID): + ret = [] + #EVENTS + + #| pk | fileUUID | eventIdentifierUUID | eventType | eventDateTime | eventDetail | eventOutcome | eventOutcomeDetailNote | linkingAgentIdentifier | + sql = "SELECT * FROM Events WHERE fileUUID = '" + fileUUID + "';" + rows = databaseInterface.queryAllSQL(sql) + for row in rows: + digiprovMD = etree.Element("digiprovMD") + ret.append(digiprovMD) #newChild(amdSec, "digiprovMD") + #digiprovMD.set("ID", "digiprov-"+ os.path.basename(filename) + "-" + fileUUID) + global globalDigiprovMDCounter + globalDigiprovMDCounter += 1 + digiprovMD.set("ID", "digiprovMD_"+ globalDigiprovMDCounter.__str__()) + + mdWrap = newChild(digiprovMD,"mdWrap") + mdWrap.set("MDTYPE", "PREMIS:EVENT") + xmlData = newChild(mdWrap,"xmlData") + event = etree.SubElement(xmlData, "event", nsmap={None: premisNS}) + event.set(xsiBNS+"schemaLocation", premisNS + " http://www.loc.gov/standards/premis/v2/premis-v2-2.xsd") + event.set("version", "2.2") + + eventIdentifier = etree.SubElement(event, "eventIdentifier") + etree.SubElement(eventIdentifier, "eventIdentifierType").text = "UUID" + etree.SubElement(eventIdentifier, "eventIdentifierValue").text = row[2] + + 
etree.SubElement(event, "eventType").text = row[3] + etree.SubElement(event, "eventDateTime").text = row[4].__str__().replace(" ", "T") + etree.SubElement(event, "eventDetail").text = escape(row[5]) + + eventOutcomeInformation = etree.SubElement(event, "eventOutcomeInformation") + etree.SubElement(eventOutcomeInformation, "eventOutcome").text = row[6] + eventOutcomeDetail = etree.SubElement(eventOutcomeInformation, "eventOutcomeDetail") + etree.SubElement(eventOutcomeDetail, "eventOutcomeDetailNote").text = escape(row[7]) + + #linkingAgentIdentifier + sql = """SELECT agentIdentifierType, agentIdentifierValue, agentName, agentType FROM Agents;""" + c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + while row != None: + linkingAgentIdentifier = etree.SubElement(event, "linkingAgentIdentifier") + etree.SubElement(linkingAgentIdentifier, "linkingAgentIdentifierType").text = row[0] + etree.SubElement(linkingAgentIdentifier, "linkingAgentIdentifierValue").text = row[1] + row = c.fetchone() + sqlLock.release() + return ret + +def createDigiprovMDAgents(): + ret = [] + #AGENTS + sql = """SELECT agentIdentifierType, agentIdentifierValue, agentName, agentType FROM Agents;""" + c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + while row != None: + global globalDigiprovMDCounter + globalDigiprovMDCounter += 1 + digiprovMD = etree.Element("digiprovMD") + digiprovMD.set("ID", "digiprovMD_"+ globalDigiprovMDCounter.__str__()) + ret.append(digiprovMD) #newChild(amdSec, "digiprovMD") + mdWrap = newChild(digiprovMD,"mdWrap") + mdWrap.set("MDTYPE", "PREMIS:AGENT") + xmlData = newChild(mdWrap,"xmlData") + #agents = etree.SubElement(xmlData, "agents") + xmlData.append(createAgent(row[0], row[1], row[2], row[3])) + row = c.fetchone() + sqlLock.release() + return ret + + + +def getAMDSec(fileUUID, filePath, use, type, id, transferUUID, itemdirectoryPath): + global globalAmdSecCounter + global globalRightsMDCounter + globalAmdSecCounter += 1 + AMDID = 
"amdSec_%s" % (globalAmdSecCounter.__str__()) + AMD = etree.Element("amdSec") + AMD.set("ID", AMDID) + ret = (AMD, AMDID) + #tech MD + #digiprob MD + AMD.append(createTechMD(fileUUID)) + + if use == "original": + metadataAppliesToList = [(fileUUID, FileMetadataAppliesToType), (fileGroupIdentifier, SIPMetadataAppliesToType), (transferUUID.__str__(), TransferMetadataAppliesToType)] + for a in archivematicaGetRights(metadataAppliesToList, fileUUID): + globalRightsMDCounter +=1 + rightsMD = etree.SubElement(AMD, "rightsMD") + rightsMD.set("ID", "rightsMD_" + globalRightsMDCounter.__str__()) + mdWrap = newChild(rightsMD,"mdWrap") + mdWrap.set("MDTYPE", "PREMIS:RIGHTS") + xmlData = newChild(mdWrap, "xmlData") + xmlData.append(a) + + if transferUUID: + sql = "SELECT type FROM Transfers WHERE transferUUID = '%s';" % (transferUUID) + rows = databaseInterface.queryAllSQL(sql) + if rows[0][0] == "Dspace": + for a in archivematicaCreateMETSRightsDspaceMDRef(fileUUID, filePath, transferUUID, itemdirectoryPath): + globalRightsMDCounter +=1 + rightsMD = etree.SubElement(AMD, "rightsMD") + rightsMD.set("ID", "rightsMD_" + globalRightsMDCounter.__str__()) + rightsMD.append(a) + + + for a in createDigiprovMD(fileUUID): + AMD.append(a) + + for a in createDigiprovMDAgents(): + AMD.append(a) + return ret + +def getIncludedStructMap(): + global fileNameToFileID + ret = [] + transferMetadata = os.path.join(baseDirectoryPath, "metadata/transfers") + baseLocations = os.listdir(transferMetadata) + baseLocations.append(baseDirectoryPath) + for dir in baseLocations: + dirPath = os.path.join(transferMetadata, dir) + structMapXmlPath = os.path.join(dirPath, "metadata/mets_structmap.xml") + if not os.path.isdir(dirPath): + continue + if os.path.isfile(structMapXmlPath): + tree = etree.parse(structMapXmlPath) + root = tree.getroot() #TDOD - not root to return, but sub element structMap + #print etree.tostring(root) + structMap = root.find(metsBNS + "structMap") + ret.append(structMap) + for item 
in structMap.findall(".//" + metsBNS + "fptr"): + fileName = item.get("FILEID") + if fileName in fileNameToFileID: + #print fileName, " -> ", fileNameToFileID[fileName] + item.set("FILEID", fileNameToFileID[fileName]) + else: + print >>sys.stderr,"error: no fileUUID for ", fileName + sharedVariablesAcrossModules.globalErrorCount += 1 + for fileName, fileID in fileNameToFileID.iteritems(): + #locate file based on key + continue + print fileName + return ret + +#DMDID="dmdSec_01" for an object goes in here +# +def createFileSec(directoryPath, structMapDiv): + global fileNameToFileID + delayed = [] + filesInThisDirectory = [] + dspaceMetsDMDID = None + directoryContents = os.listdir(directoryPath) + directoryContentsTuples = [] + for item in directoryContents: + itemdirectoryPath = os.path.join(directoryPath, item) + if os.path.isdir(itemdirectoryPath): + delayed.append(item) + + elif os.path.isfile(itemdirectoryPath): + #find original file name + directoryPathSTR = itemdirectoryPath.replace(baseDirectoryPath, baseDirectoryPathString, 1) + sql = """SELECT Related.originalLocation AS 'derivedFromOriginalLocation', Current.originalLocation FROM Files AS Current LEFT OUTER JOIN Derivations ON Current.fileUUID = Derivations.derivedFileUUID LEFT OUTER JOIN Files AS Related ON Derivations.sourceFileUUID = Related.fileUUID WHERE Current.removedTime = 0 AND Current.%s = '%s' AND Current.currentLocation = '%s';""" % (fileGroupType, fileGroupIdentifier, MySQLdb.escape_string(directoryPathSTR)) + c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + if row == None: + print >>sys.stderr, "No uuid for file: \"", directoryPathSTR, "\"" + sharedVariablesAcrossModules.globalErrorCount += 1 + sqlLock.release() + continue + while row != None: + #add to files in this directory tuple list + derivedFromOriginalName = row[0] + originalLocation = row[1] + if derivedFromOriginalName != None: + originalLocation = derivedFromOriginalName + originalName = 
os.path.basename(originalLocation) + u"/" #+ u"/" keeps normalized after original / is very uncommon in a file name + directoryContentsTuples.append((originalName, item,)) + row = c.fetchone() + sqlLock.release() + + #order files by their original name + for originalName, item in sorted(directoryContentsTuples, key=lambda listItems: listItems[0], cmp=sharedVariablesAcrossModules.collator.compare): + #item = unicode(item) + itemdirectoryPath = os.path.join(directoryPath, item) + + #myuuid = uuid.uuid4() + myuuid="" + #directoryPathSTR = itemdirectoryPath.replace(baseDirectoryPath + "objects", "objects", 1) + directoryPathSTR = itemdirectoryPath.replace(baseDirectoryPath, baseDirectoryPathString, 1) + + sql = """SELECT fileUUID, fileGrpUse, fileGrpUUID, transferUUID, label FROM Files WHERE removedTime = 0 AND %s = '%s' AND Files.currentLocation = '%s';""" % (fileGroupType, fileGroupIdentifier, MySQLdb.escape_string(directoryPathSTR)) + c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + if row == None: + print >>sys.stderr, "No uuid for file: \"", directoryPathSTR, "\"" + sharedVariablesAcrossModules.globalErrorCount += 1 + sqlLock.release() + continue + while row != None: + myuuid = row[0] + use = row[1] + fileGrpUUID = row[2] + transferUUID = row[3] + label = row[4] + row = c.fetchone() + sqlLock.release() + + filename = ''.join(quoteattr(item).split("\"")[1:-1]) + directoryPathSTR = itemdirectoryPath.replace(baseDirectoryPath, "", 1) + #print filename, directoryPathSTR + + + FILEID="%s-%s" % (item, myuuid) + if FILEID[0].isdigit(): + FILEID = "_" + FILEID + + + # + fileDiv = etree.SubElement(structMapDiv, "div") + if label != None: + fileDiv.set("LABEL", label) + newChild(fileDiv, "fptr", sets=[("FILEID",FILEID)]) + fileNameToFileID[item] = FILEID + + GROUPID = "" + if fileGrpUUID: + GROUPID = "Group-%s" % (fileGrpUUID) + + elif use == "original" or use == "submissionDocumentation": + GROUPID = "Group-%s" % (myuuid) + + elif use == "preservation": 
+ sql = "SELECT * FROM Derivations WHERE derivedFileUUID = '" + myuuid + "';" + c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + while row != None: + GROUPID = "Group-%s" % (row[1]) + row = c.fetchone() + sqlLock.release() + + elif use == "license" or use == "text/ocr" or use == "DSPACEMETS": + sql = """SELECT originalLocation FROM Files where fileUUID = '%s'""" % (myuuid) + originalLocation = databaseInterface.queryAllSQL(sql)[0][0] + sql = """SELECT fileUUID FROM Files WHERE removedTime = 0 AND %s = '%s' AND fileGrpUse = 'original' AND originalLocation LIKE '%s/%%'""" % (fileGroupType, fileGroupIdentifier, MySQLdb.escape_string(os.path.dirname(originalLocation)).replace("%", "\%")) + c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + while row != None: + GROUPID = "Group-%s" % (row[0]) + row = c.fetchone() + sqlLock.release() + + elif use == "service": + fileFileIDPath = itemdirectoryPath.replace(baseDirectoryPath + "objects/service/", baseDirectoryPathString + "objects/") + objectNameExtensionIndex = fileFileIDPath.rfind(".") + fileFileIDPath = fileFileIDPath[:objectNameExtensionIndex + 1] + sql = """SELECT fileUUID FROM Files WHERE removedTime = 0 AND %s = '%s' AND fileGrpUse = 'original' AND currentLocation LIKE '%s%%'""" % (fileGroupType, fileGroupIdentifier, MySQLdb.escape_string(fileFileIDPath.replace("%", "\%"))) + c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + while row != None: + GROUPID = "Group-%s" % (row[0]) + row = c.fetchone() + sqlLock.release() + + if transferUUID: + sql = "SELECT type FROM Transfers WHERE transferUUID = '%s';" % (transferUUID) + rows = databaseInterface.queryAllSQL(sql) + if rows[0][0] == "Dspace": + if use == "DSPACEMETS": + use = "submissionDocumentation" + admidApplyTo = None + if GROUPID=="": #is an AIP identifier + GROUPID = myuuid + admidApplyTo = structMapDiv.getparent() + + + LABEL = "mets.xml-%s" % (GROUPID) + dmdSec, ID = createMDRefDMDSec(LABEL, 
itemdirectoryPath, directoryPathSTR) + dmdSecs.append(dmdSec) + if admidApplyTo != None: + admidApplyTo.set("DMDID", ID) + else: + dspaceMetsDMDID = ID + + if GROUPID=="": + sharedVariablesAcrossModules.globalErrorCount += 1 + print >>sys.stderr, "No groupID for file: \"", directoryPathSTR, "\"" + + if use not in globalFileGrps: + print >>sys.stderr, "Invalid use: \"", use, "\"" + sharedVariablesAcrossModules.globalErrorCount += 1 + else: + file = newChild(globalFileGrps[use], "file", sets=[("ID",FILEID), ("GROUPID",GROUPID)]) + if use == "original": + filesInThisDirectory.append(file) + # + Flocat = newChild(file, "FLocat", sets=[(xlinkBNS +"href",directoryPathSTR), ("LOCTYPE","OTHER"), ("OTHERLOCTYPE", "SYSTEM")]) + if includeAmdSec: + AMD, ADMID = getAMDSec(myuuid, directoryPathSTR, use, fileGroupType, fileGroupIdentifier, transferUUID, itemdirectoryPath) + global amdSecs + amdSecs.append(AMD) + file.set("ADMID", ADMID) + + + if dspaceMetsDMDID != None: + for file in filesInThisDirectory: + file.set("DMDID", dspaceMetsDMDID) + + for item in sorted(delayed, cmp=sharedVariablesAcrossModules.collator.compare): + itemdirectoryPath = os.path.join(directoryPath, item) + createFileSec(itemdirectoryPath, newChild(structMapDiv, "div", sets=[("TYPE","directory"), ("LABEL",item)])) + + +if __name__ == '__main__': + sharedVariablesAcrossModules.collator = PyICU.Collator.createInstance(PyICU.Locale(opts.PyICULocale)) + while False: #used to stall the mcp and stop the client for testing this module + import time + time.sleep(10) + + if not baseDirectoryPath.endswith('/'): + baseDirectoryPath += '/' + structMap = etree.Element("structMap") + structMap.set("TYPE", "physical") + structMap.set("LABEL", "Archivematica default") + structMapDiv = newChild(structMap, "div", sets=[("TYPE","directory"), ("LABEL","%s-%s" % (os.path.basename(baseDirectoryPath[:-1]), fileGroupIdentifier))]) + #dmdSec, dmdSecID = createDublincoreDMDSec(SIP) + structMapDiv = newChild(structMapDiv, "div", 
sets=[("TYPE","directory"), ("LABEL","objects") ]) + createFileSec(os.path.join(baseDirectoryPath, "objects"), structMapDiv) + + + fileSec = etree.Element( "fileSec") + for group in globalFileGrpsUses: #globalFileGrps.itervalues(): + grp = globalFileGrps[group] + if len(grp) > 0: + fileSec.append(grp) + + rootNSMap = {None: metsNS} + rootNSMap.update(NSMAP) + root = etree.Element( "mets", \ + nsmap = rootNSMap, \ + attrib = { "{" + xsiNS + "}schemaLocation" : "http://www.loc.gov/METS/ http://www.loc.gov/standards/mets/version18/mets.xsd" } ) + + + + dc = createDublincoreDMDSec(SIPMetadataAppliesToType, fileGroupIdentifier) + if dc != None: + (dmdSec, ID) = dc + structMapDiv.set("DMDID", ID) + root.append(dmdSec) + + for dmdSec in dmdSecs: + root.append(dmdSec) + + for amdSec in amdSecs: + root.append(amdSec) + + root.append(fileSec) + root.append(structMap) + for structMapIncl in getIncludedStructMap(): + root.append(structMapIncl) + if False: #debug + print etree.tostring(root, pretty_print=True) + + #
+ #
+ #Recursive function for creating structmap and fileSec + tree = etree.ElementTree(root) + #tree.write(XMLFile) + tree.write(XMLFile, pretty_print=True, xml_declaration=True) + + writeTestXMLFile = True + if writeTestXMLFile: + import cgi + fileName = XMLFile + ".validatorTester.html" + fileContents = """ + + +
+ + +
+ + +
+
+ +
+
+ + + +""" % (cgi.escape(etree.tostring(root, pretty_print=True, xml_declaration=True))) + f = open(fileName, 'w') + f.write(fileContents) + f.close + + exit(sharedVariablesAcrossModules.globalErrorCount) diff --git a/src/MCPClient/lib/clientScripts/archivematicaCreateMETSRights.py b/src/MCPClient/lib/clientScripts/archivematicaCreateMETSRights.py new file mode 100755 index 0000000000..8ec5d488ba --- /dev/null +++ b/src/MCPClient/lib/clientScripts/archivematicaCreateMETSRights.py @@ -0,0 +1,288 @@ +#!/usr/bin/python -OO +# +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . 
def formatDate(date):
    """Normalize a date string for edtfSimpleType output.

    Hack fix for 0.8 (easy dashboard insertion, ISO 8061 -> edtfSimpleType):
    slashes are swapped for hyphens.  Falsy values (None, "") are returned
    unchanged.
    """
    if not date:
        return date
    return date.replace("/", "-")
http://www.loc.gov/standards/premis/v2/premis-v2-2.xsd") + #rightsStatement.set("version", "2.1") #cvc-complex-type.3.2.2: Attribute 'version' is not allowed to appear in element 'rightsStatement'. + ret.append(rightsStatement) + for i in range(len(key)): + valueDic[key[i]] = row[i] + + rightsStatementIdentifier = etree.SubElement(rightsStatement, "rightsStatementIdentifier") + if valueDic["rightsStatementIdentifierValue"]: + etree.SubElement(rightsStatementIdentifier, "rightsStatementIdentifierType").text = valueDic["rightsStatementIdentifierType"] + etree.SubElement(rightsStatementIdentifier, "rightsStatementIdentifierValue").text = valueDic["rightsStatementIdentifierValue"] + else: + etree.SubElement(rightsStatementIdentifier, "rightsStatementIdentifierType").text = "UUID" + etree.SubElement(rightsStatementIdentifier, "rightsStatementIdentifierValue").text = uuid.uuid4().__str__() + if valueDic["rightsBasis"] in rightsBasisActuallyOther: + etree.SubElement(rightsStatement, "rightsBasis").text = "Other" + else: + etree.SubElement(rightsStatement, "rightsBasis").text = valueDic["rightsBasis"] + + #copright information + if valueDic["rightsBasis"].lower() in ["copyright"]: + sql = """SELECT pk, copyrightStatus, copyrightJurisdiction, copyrightStatusDeterminationDate, copyrightApplicableStartDate, copyrightApplicableEndDate, copyrightApplicableEndDateOpen FROM RightsStatementCopyright WHERE fkRightsStatement = %d""" % (valueDic["RightsStatement.pk"]) + rows2 = databaseInterface.queryAllSQL(sql) + for row2 in rows2: + copyrightInformation = etree.SubElement(rightsStatement, "copyrightInformation") + etree.SubElement(copyrightInformation, "copyrightStatus").text = valueDic["copyrightStatus"] + copyrightJurisdiction = valueDic["copyrightJurisdiction"] + copyrightJurisdictionCode = getCodeForCountry(copyrightJurisdiction.__str__().upper()) + if copyrightJurisdictionCode != None: + copyrightJurisdiction = copyrightJurisdictionCode + etree.SubElement(copyrightInformation, 
"copyrightJurisdiction").text = copyrightJurisdiction + etree.SubElement(copyrightInformation, "copyrightStatusDeterminationDate").text = formatDate(valueDic["copyrightStatusDeterminationDate"]) + #copyrightNote Repeatable + sql = "SELECT copyrightNote FROM RightsStatementCopyrightNote WHERE fkRightsStatementCopyrightInformation = %d;" % (row2[0]) + rows3 = databaseInterface.queryAllSQL(sql) + for row3 in rows3: + etree.SubElement(copyrightInformation, "copyrightNote").text = row3[0] + + #RightsStatementCopyrightDocumentationIdentifier + getDocumentationIdentifier(valueDic["RightsStatement.pk"], copyrightInformation) + + copyrightApplicableDates = etree.SubElement(copyrightInformation, "copyrightApplicableDates") + if valueDic["copyrightApplicableStartDate"]: + etree.SubElement(copyrightApplicableDates, "startDate").text = formatDate(valueDic["copyrightApplicableStartDate"]) + if row2[6]: #, copyrightApplicableEndDateOpen + etree.SubElement(copyrightApplicableDates, "endDate").text = "OPEN" + elif valueDic["copyrightApplicableEndDate"]: + etree.SubElement(copyrightApplicableDates, "endDate").text = formatDate(valueDic["copyrightApplicableEndDate"]) + + elif valueDic["rightsBasis"].lower() in ["license"]: + sql = """SELECT licenseTerms, licenseApplicableStartDate, licenseApplicableEndDate, licenseDocumentationIdentifierType, licenseDocumentationIdentifierValue, RightsStatementLicense.pk, licenseDocumentationIdentifierRole, licenseApplicableEndDateOpen + FROM RightsStatementLicense JOIN RightsStatementLicenseDocumentationIdentifier ON RightsStatementLicenseDocumentationIdentifier.fkRightsStatementLicense = RightsStatementLicense.pk WHERE RightsStatementLicense.fkRightsStatement = %d;""" % (valueDic["RightsStatement.pk"]) + rows2 = databaseInterface.queryAllSQL(sql) + for row2 in rows2: + licenseInformation = etree.SubElement(rightsStatement, "licenseInformation") + + licenseDocumentIdentifier = etree.SubElement(licenseInformation, "licenseDocumentationIdentifier") + 
etree.SubElement(licenseDocumentIdentifier, "licenseDocumentationIdentifierType").text = row2[3] + etree.SubElement(licenseDocumentIdentifier, "licenseDocumentationIdentifierValue").text = row2[4] + etree.SubElement(licenseDocumentIdentifier, "licenseDocumentationRole").text = row2[6] + + etree.SubElement(licenseInformation, "licenseTerms").text = valueDic["licenseTerms"] + + sql = "SELECT licenseNote FROM RightsStatementLicenseNote WHERE fkRightsStatementLicense = %d;" % (row2[5]) + rows3 = databaseInterface.queryAllSQL(sql) + for row3 in rows3: + etree.SubElement(licenseInformation, "licenseNote").text = row3[0] + + licenseApplicableDates = etree.SubElement(licenseInformation, "licenseApplicableDates") + if valueDic["licenseApplicableStartDate"]: + etree.SubElement(licenseApplicableDates, "startDate").text = formatDate(valueDic["licenseApplicableStartDate"]) + if row2[7]: #licenseApplicableEndDateOpen + etree.SubElement(licenseApplicableDates, "endDate").text = "OPEN" + elif valueDic["licenseApplicableEndDate"]: + etree.SubElement(licenseApplicableDates, "endDate").text = formatDate(valueDic["licenseApplicableEndDate"]) + + elif valueDic["rightsBasis"].lower() in ["statute"]: + #4.1.5 statuteInformation (O, R) + getstatuteInformation(valueDic["RightsStatement.pk"], rightsStatement) + + elif valueDic["rightsBasis"].lower() in ["donor", "policy", "other"]: + otherRightsInformation = etree.SubElement(rightsStatement, "otherRightsInformation") + sql = """SELECT pk, otherRightsBasis, otherRightsApplicableStartDate, otherRightsApplicableEndDate, otherRightsApplicableEndDateOpen FROM RightsStatementOtherRightsInformation WHERE RightsStatementOtherRightsInformation.fkRightsStatement = %d;""" % (valueDic["RightsStatement.pk"]) + rows2 = databaseInterface.queryAllSQL(sql) + for row2 in rows2: + #otherRightsDocumentationIdentifier + sql = """SELECT otherRightsDocumentationIdentifierType, otherRightsDocumentationIdentifierValue, otherRightsDocumentationIdentifierRole FROM 
RightsStatementOtherRightsDocumentationIdentifier WHERE fkRightsStatementotherRightsInformation = %s """ % (row2[0]) + rows3 = databaseInterface.queryAllSQL(sql) + for row3 in rows3: + otherRightsDocumentationIdentifier = etree.SubElement(otherRightsInformation, "otherRightsDocumentationIdentifier") + etree.SubElement(otherRightsDocumentationIdentifier, "otherRightsDocumentationIdentifierType").text = row3[0] + etree.SubElement(otherRightsDocumentationIdentifier, "otherRightsDocumentationIdentifierValue").text = row3[1] + etree.SubElement(otherRightsDocumentationIdentifier, "otherRightsDocumentationRole").text = row3[2] + + otherRightsBasis = row2[1] + + if not otherRightsBasis or valueDic["rightsBasis"] in rightsBasisActuallyOther: #not 100% + otherRightsBasis = valueDic["rightsBasis"] + etree.SubElement(otherRightsInformation, "otherRightsBasis").text = otherRightsBasis + + + otherRightsApplicableStartDate = row2[2] + otherRightsApplicableEndDate = row2[3] + otherRightsApplicableEndDateOpen = row2[4] + if otherRightsApplicableStartDate or otherRightsApplicableEndDate: + otherRightsApplicableDates = etree.SubElement(otherRightsInformation, "otherRightsApplicableDates") + if otherRightsApplicableStartDate: + etree.SubElement(otherRightsApplicableDates, "startDate").text = formatDate(otherRightsApplicableStartDate) + if otherRightsApplicableEndDateOpen: + etree.SubElement(otherRightsApplicableDates, "endDate").text = "OPEN" + elif otherRightsApplicableEndDate: + etree.SubElement(otherRightsApplicableDates, "endDate").text = formatDate(otherRightsApplicableEndDate) + + #otherRightsNote Repeatable + sql = "SELECT otherRightsNote FROM RightsStatementOtherRightsNote WHERE fkRightsStatementOtherRightsInformation = %d;" % (row2[0]) + rows3 = databaseInterface.queryAllSQL(sql) + for row3 in rows3: + etree.SubElement(otherRightsInformation, "otherRightsNote").text = row3[0] + + #4.1.6 rightsGranted (O, R) + getrightsGranted(valueDic["RightsStatement.pk"], rightsStatement) + 
def getDocumentationIdentifier(pk, parent):
    """Append a <copyrightDocumentationIdentifier> to `parent` for every
    documentation-identifier row attached to copyright information `pk`."""
    sql = "SELECT pk, copyrightDocumentationIdentifierType, copyrightDocumentationIdentifierValue, copyrightDocumentationIdentifierRole FROM RightsStatementCopyrightDocumentationIdentifier WHERE fkRightsStatementCopyrightInformation = %d" % (pk)
    for _, idType, idValue, idRole in databaseInterface.queryAllSQL(sql):
        docIdentifier = etree.SubElement(parent, "copyrightDocumentationIdentifier")
        etree.SubElement(docIdentifier, "copyrightDocumentationIdentifierType").text = idType
        etree.SubElement(docIdentifier, "copyrightDocumentationIdentifierValue").text = idValue
        etree.SubElement(docIdentifier, "copyrightDocumentationRole").text = idRole


def getstatuteInformation(pk, parent):
    """Append a PREMIS <statuteInformation> (4.1.5) block to `parent` for
    every statute row attached to rights statement `pk`, including notes,
    documentation identifiers, and the applicable date range."""
    sql = "SELECT pk, statuteJurisdiction, statuteCitation, statuteInformationDeterminationDate, statuteapplicablestartdate, statuteapplicableenddate, statuteApplicableEndDateOpen FROM RightsStatementStatuteInformation WHERE fkRightsStatement = %d" % (pk)
    #print sql
    for statutePK, jurisdiction, citation, determinationDate, applicableStart, applicableEnd, endDateOpen in databaseInterface.queryAllSQL(sql):
        statuteInformation = etree.SubElement(parent, "statuteInformation")
        etree.SubElement(statuteInformation, "statuteJurisdiction").text = jurisdiction
        etree.SubElement(statuteInformation, "statuteCitation").text = citation
        etree.SubElement(statuteInformation, "statuteInformationDeterminationDate").text = formatDate(determinationDate)

        #statuteNote Repeatable
        sql = "SELECT statuteNote FROM RightsStatementStatuteInformationNote WHERE fkRightsStatementStatuteInformation = %d;" % (statutePK)
        for noteRow in databaseInterface.queryAllSQL(sql):
            etree.SubElement(statuteInformation, "statuteNote").text = noteRow[0]

        sql = """SELECT statuteDocumentationIdentifierType, statuteDocumentationIdentifierValue, statuteDocumentationIdentifierRole FROM RightsStatementStatuteDocumentationIdentifier WHERE fkRightsStatementStatuteInformation = %s """ % (statutePK)
        for docType, docValue, docRole in databaseInterface.queryAllSQL(sql):
            statuteDocumentationIdentifier = etree.SubElement(statuteInformation, "statuteDocumentationIdentifier")
            etree.SubElement(statuteDocumentationIdentifier, "statuteDocumentationIdentifierType").text = docType
            etree.SubElement(statuteDocumentationIdentifier, "statuteDocumentationIdentifierValue").text = docValue
            etree.SubElement(statuteDocumentationIdentifier, "statuteDocumentationRole").text = docRole

        # Date range is only emitted when at least one component is set;
        # an open end date takes precedence over a concrete one.
        if applicableStart or applicableEnd or endDateOpen:
            statuteApplicableDates = etree.SubElement(statuteInformation, "statuteApplicableDates")
            if applicableStart:
                etree.SubElement(statuteApplicableDates, "startDate").text = formatDate(applicableStart)
            if endDateOpen:
                etree.SubElement(statuteApplicableDates, "endDate").text = "OPEN"
            elif applicableEnd:
                etree.SubElement(statuteApplicableDates, "endDate").text = formatDate(applicableEnd)
def getrightsGranted(pk, parent):
    """Append a PREMIS <rightsGranted> (4.1.6) to `parent` for every grant
    row attached to rights statement `pk`.

    For each grant this emits <act>, one <restriction> per restriction row
    (validated against Allow/Disallow/Conditional; stays "Undefined" when no
    restriction row exists), a <termOfGrant> or <termOfRestriction> date
    range chosen by the last restriction seen, and any <rightsGrantedNote>
    rows.  Invalid restriction values are reported on stderr and counted in
    sharedVariablesAcrossModules.globalErrorCount.
    """
    sql = "SELECT RightsStatementRightsGranted.pk, act, startDate, endDate, endDateOpen FROM RightsStatementRightsGranted WHERE fkRightsStatement = %d" % (pk)
    rows = databaseInterface.queryAllSQL(sql)
    for row in rows:
        rightsGranted = etree.SubElement(parent, "rightsGranted")
        etree.SubElement(rightsGranted, "act").text = row[1]

        restriction = "Undefined"
        sql = """SELECT restriction FROM RightsStatementRightsGrantedRestriction WHERE RightsStatementRightsGrantedRestriction.fkRightsStatementRightsGranted = %s """ % (row[0])
        rows2 = databaseInterface.queryAllSQL(sql)
        for row2 in rows2:
            restriction = row2[0]
            if not restriction.lower() in ["disallow", "conditional", "allow"]:
                print >>sys.stderr, "The value of element restriction must be: 'Allow', 'Disallow', or 'Conditional':", restriction
                sharedVariablesAcrossModules.globalErrorCount +=1
            etree.SubElement(rightsGranted, "restriction").text = restriction

        # A date range is only emitted when at least one of
        # startDate/endDate/endDateOpen is set.
        if row[2] or row[3] or row[4]:
            if restriction.lower() in ["allow"]:
                termOfGrant = etree.SubElement(rightsGranted, "termOfGrant")
            elif restriction.lower() in ["disallow", "conditional"]:
                termOfGrant = etree.SubElement(rightsGranted, "termOfRestriction")
            else:
                # Fixed typo in this message: previously read 'Dissallow'.
                print >>sys.stderr, "The value of element restriction must be: 'Allow', 'Disallow', or 'Conditional'"
                sharedVariablesAcrossModules.globalErrorCount +=1
                continue

            if row[2]:
                etree.SubElement(termOfGrant, "startDate").text = formatDate(row[2])
            if row[4]:  # endDateOpen wins over a concrete endDate
                etree.SubElement(termOfGrant, "endDate").text = "OPEN"
            elif row[3]:
                etree.SubElement(termOfGrant, "endDate").text = formatDate(row[3])

        #4.1.6.4 rightsGrantedNote (O, R)
        sql = "SELECT rightsGrantedNote FROM RightsStatementRightsGrantedNote WHERE fkRightsStatementRightsGranted = %d;" % (row[0])
        rows2 = databaseInterface.queryAllSQL(sql)
        for row2 in rows2:
            etree.SubElement(rightsGranted, "rightsGrantedNote").text = row2[0]
a/src/MCPClient/lib/clientScripts/archivematicaCreateMETSRightsDspaceMDRef.py b/src/MCPClient/lib/clientScripts/archivematicaCreateMETSRightsDspaceMDRef.py new file mode 100755 index 0000000000..74180a7040 --- /dev/null +++ b/src/MCPClient/lib/clientScripts/archivematicaCreateMETSRightsDspaceMDRef.py @@ -0,0 +1,102 @@ +#!/usr/bin/python -OO +# +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . 
+ +# @package Archivematica +# @subpackage archivematicaClientScript +# @author Joseph Perry +# @version svn: $Id$ + +from archivematicaXMLNamesSpace import * +import os +import sys +import lxml.etree as etree +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +import databaseInterface +from sharedVariablesAcrossModules import sharedVariablesAcrossModules + + +def createMDRefDMDSec(LABEL, itemdirectoryPath, directoryPathSTR): + XPTR = "xpointer(id(" + tree = etree.parse(itemdirectoryPath) + root = tree.getroot() + a = """ +""" + for item in root.findall("{http://www.loc.gov/METS/}amdSec/{http://www.loc.gov/METS/}rightsMD"): + #print "rights id:", item.get("ID") + XPTR = "%s %s" % (XPTR, item.get("ID")) + XPTR = XPTR.replace(" ", "'", 1) + "'))" + mdRef = etree.Element("mdRef") + mdRef.set("LABEL", LABEL) + mdRef.set(xlinkBNS +"href", directoryPathSTR) + mdRef.set("MDTYPE", "OTHER") + mdRef.set("OTHERMDTYPE", "METSRIGHTS") + mdRef.set("LOCTYPE","OTHER") + mdRef.set("OTHERLOCTYPE", "SYSTEM") + mdRef.set("XPTR", XPTR) + return mdRef + + + +def archivematicaCreateMETSRightsDspaceMDRef(fileUUID, filePath, transferUUID, itemdirectoryPath): + ret = [] + try: + print fileUUID, filePath + #find the mets file + sql = "SELECT fileUUID, currentLocation FROM Files WHERE currentLocation = '%%SIPDirectory%%%s/mets.xml' AND transferUUID = '%s';" % (os.path.dirname(filePath), transferUUID) + rows = databaseInterface.queryAllSQL(sql) + for row in rows: + metsFileUUID = row[0] + metsLoc = row[1].replace("%SIPDirectory%", "", 1) + metsLocation = os.path.join(os.path.dirname(itemdirectoryPath), "mets.xml") + LABEL = "mets.xml-%s" % (metsFileUUID) + ret.append(createMDRefDMDSec(LABEL, metsLocation, metsLoc)) + + base = os.path.dirname(os.path.dirname(itemdirectoryPath)) + base2 = os.path.dirname(os.path.dirname(filePath)) + + for dir in os.listdir(base): + fullDir = os.path.join(base, dir) + fullDir2 = os.path.join(base2, dir) + print fullDir + if dir.startswith("ITEM"): + 
print "continue" + continue + if not os.path.isdir(fullDir): + continue + sql = "SELECT fileUUID, currentLocation FROM Files WHERE currentLocation = '%%SIPDirectory%%%s/mets.xml' AND transferUUID = '%s';" % (fullDir2, transferUUID) + print sql + rows = databaseInterface.queryAllSQL(sql) + for row in rows: + print row + metsFileUUID = row[0] + metsLoc = row[1].replace("%SIPDirectory%", "", 1) + metsLocation = os.path.join(fullDir, "mets.xml") + print metsLocation + LABEL = "mets.xml-%s" % (metsFileUUID) + ret.append(createMDRefDMDSec(LABEL, metsLocation, metsLoc)) + + + + + except Exception as inst: + print >>sys.stderr, "Error creating mets dspace mdref", fileUUID, filePath + print >>sys.stderr, type(inst), inst.args + sharedVariablesAcrossModules.globalErrorCount +=1 + + return ret diff --git a/src/MCPClient/lib/clientScripts/archivematicaFITS.py b/src/MCPClient/lib/clientScripts/archivematicaFITS.py new file mode 100755 index 0000000000..a408c60bc4 --- /dev/null +++ b/src/MCPClient/lib/clientScripts/archivematicaFITS.py @@ -0,0 +1,310 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . 
def excludeJhoveProperties(fits):
    """Strip Jhove's <properties> from the FITS tool output.

    The <properties> block contains unnecessary excess data, and the key
    data are covered by output from other FITS tools.  Mutates `fits` in
    place and returns it; if no Jhove tool output is present, a message is
    written to stderr and `fits` is returned untouched.

    NOTE(review): this def shadows the module-level flag of the same name
    (excludeJhoveProperties = True above), so the `if excludeJhoveProperties:`
    guard in __main__ actually tests the function object (always truthy)
    rather than the flag.  It works by accident; one of the two names should
    be renamed.
    """
    formatValidation = None
    tools = getTagged(getTagged(fits, FITSNS + "toolOutput")[0], FITSNS + "tool")
    for tool in tools:
        if tool.get("name") == "Jhove":
            formatValidation = tool
            break
    if formatValidation == None:
        print >>sys.stderr, "No format validation tool (Jhove)."
        return fits
    repInfo = getTagged(formatValidation, "repInfo")[0]
    # Drop the first <properties> child, if present.
    properties = getTagged(repInfo, "properties")

    if len(properties):
        repInfo.remove(properties[0])
    return fits
+ quit(3) + + repInfo = getTagged(formatValidation, "repInfo")[0] + #program="DROID"; version="3.0" + eventDetailText = "program=\"" + formatValidation.get("name") \ + + "\"; version=\"" + formatValidation.get("version") + "\"" + + + #Well-Formed and valid + status = getTagged( repInfo, prefix + "status")[0].text + eventOutcomeText = "fail" + if status == "Well-Formed and valid": + eventOutcomeText = "pass" + + # format="Windows Bitmap"; version="3.0"; result="Well-formed and valid" + format = getTagged(repInfo, prefix + "format")[0].text + versionXML = getTagged(repInfo, prefix + "version") + version = "" + if len(versionXML): + version = versionXML[0].text + eventOutcomeDetailNote = "format=\"" + format + if version: + eventOutcomeDetailNote += "\"; version=\"" + version + eventOutcomeDetailNote += "\"; result=\"" + status + "\"" + + return tuple([eventDetailText, eventOutcomeText, eventOutcomeDetailNote]) #tuple([1, 2, 3]) returns (1, 2, 3). + + +def formatIdentificationFITSAssist(fits, fileUUID): + prefix = "{http://www.nationalarchives.gov.uk/pronom/FileCollection}" + formatIdentification = None + + tools = getTagged(getTagged(fits, FITSNS + "toolOutput")[0], FITSNS + "tool") + for tool in tools: + if tool.get("name") == "Droid": + formatIdentification = tool + break + #program="DROID"; version="3.0" + eventDetailText = "program=\"" + formatIdentification.get("name") \ + + "\"; version=\"" + formatIdentification.get("version") + "\"" + + #positive + + fileCollection = getTagged(formatIdentification, prefix + "FileCollection")[0] + IdentificationFile = getTagged(fileCollection, prefix + "IdentificationFile")[0] + eventOutcomeText = IdentificationFile.get( "IdentQuality") + + #fmt/116 + # + fileFormatHits = getTagged(IdentificationFile, prefix + "FileFormatHit") + eventOutcomeDetailNotes = [] + eventOutcomeDetailNote = "" + for fileFormatHit in fileFormatHits: + format = etree.Element("format") + if len(fileFormatHit): + formatIDSQL = {"fileUUID":fileUUID, \ + 
"formatName":"", \ + "formatVersion":"", \ + "formatRegistryName":"PRONOM", \ + "formatRegistryKey":""} + eventOutcomeDetailNote = getTagged(fileFormatHit, prefix + "PUID")[0].text + + #formatDesignation = etree.SubElement(format, "formatDesignation") + formatName = getTagged(fileFormatHit, prefix + "Name") + formatVersion = getTagged(fileFormatHit, prefix + "Version") + + + if len(formatName): + #etree.SubElement(formatDesignation, "formatName").text = formatName[0].text + formatIDSQL["formatName"] = formatName[0].text + if len(formatVersion): + #etree.SubElement(formatDesignation, "formatVersion").text = formatVersion[0].text + formatIDSQL["formatVersion"] = formatVersion[0].text + formatRegistry = etree.SubElement(format, "formatRegistry") + + PUID = getTagged(fileFormatHit, prefix + "PUID") + if len(PUID): + #etree.SubElement(formatRegistry, "formatRegistryName").text = "PRONOM" + #etree.SubElement(formatRegistry, "formatRegistryKey").text = PUID[0].text + formatIDSQL["formatRegistryKey"] = PUID[0].text + formats.append(format) + print formatIDSQL + insertIntoFilesIDs(fileUUID=fileUUID, \ + formatName=formatIDSQL["formatName"], \ + formatVersion=formatIDSQL["formatVersion"], \ + formatRegistryName=formatIDSQL["formatRegistryName"], \ + formatRegistryKey=formatIDSQL["formatRegistryKey"]) + else: + eventOutcomeDetailNote = "No Matching Format Found" + formatDesignation = etree.SubElement(format, "formatDesignation") + etree.SubElement(formatDesignation, "formatName").text = "Unknown" + formats.append(format) + eventOutcomeDetailNotes.append(eventOutcomeDetailNote) + return tuple([eventDetailText, eventOutcomeText, eventOutcomeDetailNotes]) #tuple([1, 2, 3]) returns (1, 2, 3). + + +def includeFits(fits, xmlFile, date, eventUUID, fileUUID): + global exitCode + ##eventOutcome = createOutcomeInformation( eventOutcomeDetailNote = uuid) + #TO DO... 
Gleam the event outcome information from the output + + #print etree.tostring(fits, pretty_print=True) + # + #try: + eventDetailText, eventOutcomeText, eventOutcomeDetailNotes = formatIdentificationFITSAssist(fits, fileUUID) + #except: + if 0: + eventDetailText = "Failed" + eventOutcomeText = "Failed" + eventOutcomeDetailNotes = ["Failed"] + exitCode += 4 + outcomeInformation = createOutcomeInformation( "To be removed", eventOutcomeText) + #formatIdentificationEvent = createEvent( eventUUID, "format identification", \ + # eventDateTime=date, \ + # eventDetailText=eventDetailText, \ + # eOutcomeInformation=outcomeInformation) + + #eventOutcomeInformation = getTagged(formatIdentificationEvent, "eventOutcomeInformation")[0] + #eventOutcomeDetail = getTagged(eventOutcomeInformation, "eventOutcomeDetail")[0] + #eventOutcomeInformation.remove(eventOutcomeDetail) + + for eventOutcomeDetailNote in eventOutcomeDetailNotes: + #eventOutcomeDetail = etree.SubElement(eventOutcomeInformation, "eventOutcomeDetail") + #etree.SubElement(eventOutcomeDetail, "eventOutcomeDetailNote").text = eventOutcomeDetailNote + + insertIntoEvents(fileUUID=fileUUID, \ + eventIdentifierUUID=uuid.uuid4().__str__(), \ + eventType="format identification", \ + eventDateTime=date, \ + eventDetail=eventDetailText, \ + eventOutcome=eventOutcomeText, \ + eventOutcomeDetailNote=eventOutcomeDetailNote) + + # + try: + eventDetailText, eventOutcomeText, eventOutcomeDetailNote = formatValidationFITSAssist(fits) + except: + eventDetailText = "Failed" + eventOutcomeText = "Failed" + eventOutcomeDetailNotes = "Failed" + exitCode += 3 + #outcomeInformation = createOutcomeInformation( eventOutcomeDetailNote, eventOutcomeText) + #formatValidationEvent = createEvent( uuid.uuid4().__str__(), "validation", \ + # eventDateTime=date, \ + # eventDetailText=eventDetailText, \ + # eOutcomeInformation=outcomeInformation) + insertIntoEvents(fileUUID=fileUUID, \ + eventIdentifierUUID=uuid.uuid4().__str__(), \ + 
eventType="validation", \ + eventDateTime=date, \ + eventDetail=eventDetailText, \ + eventOutcome=eventOutcomeText, \ + eventOutcomeDetailNote=eventOutcomeDetailNote) + + #tree = etree.parse( xmlFile ) + #root = tree.getroot() + + #events = getTagged(root, "events")[0] + #events.append(formatIdentificationEvent) + #events.append(formatValidationEvent) + + #objectCharacteristics = getTagged(getTagged(root, "object")[0], "objectCharacteristics")[0] + #for format in formats: + # objectCharacteristics.append(format) + #objectCharacteristicsExtension = etree.SubElement(objectCharacteristics, "objectCharacteristicsExtension") + #objectCharacteristicsExtension.append(fits) + + + + #tree = etree.ElementTree(root) + #tree.write(xmlFile) + +if __name__ == '__main__': + global exitCode + exitCode = 0 + target = sys.argv[1] + XMLfile = sys.argv[2] + date = sys.argv[3] + eventUUID = sys.argv[4] + fileUUID = sys.argv[5] + fileGrpUse = sys.argv[6] + + if fileGrpUse in ["DSPACEMETS"]: + print "file's fileGrpUse in exclusion list, skipping" + exit(0) + + + tempFile="/tmp/" + uuid.uuid4().__str__() + + command = "fits.sh -i \"" + escapeForCommand(target) + "\" -o \"" + tempFile + "\"" + #print >>sys.stderr, command + #print >>sys.stderr, shlex.split(command) + try: + p = subprocess.Popen(shlex.split(command), stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + + #p.wait() + output = p.communicate() + retcode = p.returncode + + if output[0] != "": + print output[0] + if output[1] != "": + print >>sys.stderr, output[1] + + #it executes check for errors + if retcode != 0: + print >>sys.stderr, "error code:" + retcode.__str__() + print output[1]# sError + #return retcode + quit(retcode) + try: + tree = etree.parse(tempFile) + except: + os.remove(tempFile) + print >>sys.stderr, "Failed to read Fits's xml." 
+ exit(2) + fits = tree.getroot() + os.remove(tempFile) + #fits = etree.XML(output[0]) + if excludeJhoveProperties: + fits = excludeJhoveProperties(fits) + insertIntoFilesFits(fileUUID, etree.tostring(fits, pretty_print=False)) + includeFits(fits, XMLfile, date, eventUUID, fileUUID) + + except OSError, ose: + print >>sys.stderr, "Execution failed:", ose + #return 1 + exit(1) + exit(exitCode) diff --git a/src/MCPClient/lib/clientScripts/archivematicaMoveSIP.py b/src/MCPClient/lib/clientScripts/archivematicaMoveSIP.py new file mode 100755 index 0000000000..8651282b07 --- /dev/null +++ b/src/MCPClient/lib/clientScripts/archivematicaMoveSIP.py @@ -0,0 +1,57 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . 
+ +# @package Archivematica +# @subpackage archivematicaClientScript +# @author Joseph Perry +# @version svn: $Id$ +import os +import subprocess +import shlex +import sys +import MySQLdb +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +import databaseInterface +from executeOrRunSubProcess import executeOrRun +from fileOperations import renameAsSudo + +def updateDB(dst, sipUUID): + sql = """UPDATE SIPs SET currentPath='""" + MySQLdb.escape_string(dst) + """' WHERE sipUUID='""" + sipUUID + """';""" + databaseInterface.runSQL(sql) + +def moveSIP(src, dst, sipUUID, sharedDirectoryPath): + # os.rename(src, dst) + if src.endswith("/"): + src = src[:-1] + + dest = dst.replace(sharedDirectoryPath, "%sharedPath%", 1) + if dest.endswith("/"): + dest = os.path.join(dest, os.path.basename(src)) + if dest.endswith("/."): + dest = os.path.join(dest[:-1], os.path.basename(src)) + updateDB(dest + "/", sipUUID) + + renameAsSudo(src, dst) + +if __name__ == '__main__': + src = sys.argv[1] + dst = sys.argv[2] + sipUUID = sys.argv[3] + sharedDirectoryPath = sys.argv[4] + moveSIP(src, dst, sipUUID, sharedDirectoryPath) diff --git a/src/MCPClient/lib/clientScripts/archivematicaMoveTransfer.py b/src/MCPClient/lib/clientScripts/archivematicaMoveTransfer.py new file mode 100755 index 0000000000..d66a9fb32f --- /dev/null +++ b/src/MCPClient/lib/clientScripts/archivematicaMoveTransfer.py @@ -0,0 +1,57 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . + +# @package Archivematica +# @subpackage archivematicaClientScript +# @author Joseph Perry +# @version svn: $Id$ +import os +import subprocess +import shlex +import sys +import MySQLdb +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +import databaseInterface +from executeOrRunSubProcess import executeOrRun +from fileOperations import renameAsSudo + +def updateDB(dst, transferUUID): + sql = """UPDATE Transfers SET currentLocation='""" + MySQLdb.escape_string(dst) + """' WHERE transferUUID='""" + transferUUID + """';""" + databaseInterface.runSQL(sql) + +def moveSIP(src, dst, transferUUID, sharedDirectoryPath): + # os.rename(src, dst) + if src.endswith("/"): + src = src[:-1] + + dest = dst.replace(sharedDirectoryPath, "%sharedPath%", 1) + if dest.endswith("/"): + dest = os.path.join(dest, os.path.basename(src)) + if dest.endswith("/."): + dest = os.path.join(dest[:-1], os.path.basename(src)) + updateDB(dest + "/", transferUUID) + + renameAsSudo(src, dst) + +if __name__ == '__main__': + src = sys.argv[1] + dst = sys.argv[2] + transferUUID = sys.argv[3] + sharedDirectoryPath = sys.argv[4] + moveSIP(src, dst, transferUUID, sharedDirectoryPath) diff --git a/src/MCPClient/lib/clientScripts/archivematicaSetTransferType.py b/src/MCPClient/lib/clientScripts/archivematicaSetTransferType.py new file mode 100755 index 0000000000..b285b2b21a --- /dev/null +++ b/src/MCPClient/lib/clientScripts/archivematicaSetTransferType.py @@ -0,0 +1,33 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. 
+# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . + +# @package Archivematica +# @subpackage archivematicaClientScript +# @author Joseph Perry +# @version svn: $Id$ +import sys +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +import databaseInterface + +if __name__ == '__main__': + transferUUID = sys.argv[1] + transferType = sys.argv[2] + + sql = """UPDATE Transfers SET type='""" + transferType + """' WHERE transferUUID='""" + transferUUID + """';""" + databaseInterface.runSQL(sql) diff --git a/src/MCPClient/lib/clientScripts/archivematicaUpdateSizeAndChecksum.py b/src/MCPClient/lib/clientScripts/archivematicaUpdateSizeAndChecksum.py new file mode 100755 index 0000000000..80e04fee82 --- /dev/null +++ b/src/MCPClient/lib/clientScripts/archivematicaUpdateSizeAndChecksum.py @@ -0,0 +1,40 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . + +# @package Archivematica +# @subpackage archivematicaClientScript +# @author Joseph Perry +# @version svn: $Id$ +import sys +from optparse import OptionParser +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +from fileOperations import updateSizeAndChecksum + +if __name__ == '__main__': + parser = OptionParser() + parser.add_option("-i", "--fileUUID", action="store", dest="fileUUID", default="") + parser.add_option("-p", "--filePath", action="store", dest="filePath", default="") + parser.add_option("-d", "--date", action="store", dest="date", default="") + parser.add_option("-u", "--eventIdentifierUUID", action="store", dest="eventIdentifierUUID", default="") + (opts, args) = parser.parse_args() + + updateSizeAndChecksum(opts.fileUUID, \ + opts.filePath, \ + opts.date, \ + opts.eventIdentifierUUID) diff --git a/src/MCPClient/lib/clientScripts/archivematicaVerifyMets.sh b/src/MCPClient/lib/clientScripts/archivematicaVerifyMets.sh new file mode 100755 index 0000000000..13da11e0e6 --- /dev/null +++ b/src/MCPClient/lib/clientScripts/archivematicaVerifyMets.sh @@ -0,0 +1,42 @@ +#!/bin/bash + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. 
If not, see . + +# @package Archivematica +# @subpackage archivematicaClientScript +# @author Joseph Perry +# @version svn: $Id$ + + +set -e +sipPath="$1" +metsFile="${sipPath}metadata/mets_structmap.xml" +schema="/usr/lib/archivematica/archivematicaCommon/externals/mets/mets.xsd" + +#not used... doesn't put file in correct location either +if [ ! -f "${schema}" ]; then + echo TODO + echo getting mets.xsd + wget http://www.loc.gov/standards/mets/mets.xsd /usr/lib/archivematica/archivematicaCommon/externals/mets/mets.xsd +fi + +if [ -f "${metsFile}" ]; then + xmllint --noout --schema "$schema" "${sipPath}metadata/mets_structmap.xml" +else + echo No metadata/mets_structmap.xml file to verify. +fi diff --git a/src/MCPClient/lib/clientScripts/archivematicaXMLNamesSpace.py b/src/MCPClient/lib/clientScripts/archivematicaXMLNamesSpace.py new file mode 100755 index 0000000000..73e9ee5310 --- /dev/null +++ b/src/MCPClient/lib/clientScripts/archivematicaXMLNamesSpace.py @@ -0,0 +1,43 @@ +#!/usr/bin/python -OO +# +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . 
+ +# @package Archivematica +# @subpackage archivematicaClientScript +# @author Joseph Perry +# @version svn: $Id$ + + +xsiNS = "http://www.w3.org/2001/XMLSchema-instance" +metsNS = "http://www.loc.gov/METS/" +premisNS = "info:lc/xmlns/premis-v2" +dctermsNS = "http://purl.org/dc/terms/" +fitsNS = "http://hul.harvard.edu/ois/xml/ns/fits/fits_output" +xlinkNS = "http://www.w3.org/1999/xlink" +dcNS="http://purl.org/dc/elements/1.1/" + +dcBNS = "{" + dcNS + "}" +dctermsBNS = "{" + dctermsNS + "}" +xsiBNS = "{" + xsiNS + "}" +metsBNS = "{" + metsNS + "}" +premisBNS = "{" + premisNS + "}" +fitsBNS = "{" + fitsNS + "}" +xlinkBNS = "{" + xlinkNS + "}" + +NSMAP = { "xsi" : xsiNS, \ +"xlink": xlinkNS } diff --git a/src/MCPClient/lib/clientScripts/assignBAGUUID.sh b/src/MCPClient/lib/clientScripts/assignBAGUUID.sh new file mode 100755 index 0000000000..08b7deaf1d --- /dev/null +++ b/src/MCPClient/lib/clientScripts/assignBAGUUID.sh @@ -0,0 +1,40 @@ +#!/bin/bash + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . 
+ +# @package Archivematica +# @subpackage archivematicaClientScript +# @author Joseph Perry +# @version svn: $Id$ + +target="$1" +date="$2" +SIPUUID="$3" +#uuidVersion="4" +#SIPUUID=`uuid -v ${uuidVersion}` + +sudo chown -R archivematica:archivematica "${target}" +chmod -R "750" "${target}" +if [ -d "${target}logs/" ]; then + echo ${date} > "${target}logs/acquiredSIPDateTime.log" +fi +mv "${target}" "`dirname "${target}"`/`basename "${target}"`-${SIPUUID}" + +exit $? + + diff --git a/src/MCPClient/lib/clientScripts/backupDIP.sh b/src/MCPClient/lib/clientScripts/backupDIP.sh new file mode 100755 index 0000000000..4d1a119f13 --- /dev/null +++ b/src/MCPClient/lib/clientScripts/backupDIP.sh @@ -0,0 +1,39 @@ +#!/bin/bash + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . + +# @package Archivematica +# @subpackage archivematicaClientScript +# @author Joseph Perry +# @version svn: $Id$ + +DIP="$1" +DIPsStore="${2}`basename $1`/" +uploadedObjects="DIPUploadedFiles.txt" + +cd "$DIP" +ls objects > "$uploadedObjects" + +mkdir "${DIPsStore}" +chmod 750 "${DIPsStore}" +mv "objectsBackup" "${DIPsStore}." +chmod -R 750 "$uploadedObjects" +mv "$uploadedObjects" "${DIPsStore}." +chmod 770 "${DIPsStore}${uploadedObjects}" +cp "METS.xml" "${DIPsStore}." 
+chmod 750 "${DIPsStore}METS.xml" diff --git a/src/MCPClient/lib/clientScripts/checkForAccessDirectory.py b/src/MCPClient/lib/clientScripts/checkForAccessDirectory.py new file mode 100755 index 0000000000..0826621ee4 --- /dev/null +++ b/src/MCPClient/lib/clientScripts/checkForAccessDirectory.py @@ -0,0 +1,126 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . 
+ +# @package Archivematica +# @subpackage archivematicaClientScript +# @author Joseph Perry +# @version svn: $Id$ + +import os +import sys +from optparse import OptionParser +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +import databaseInterface +from fileOperations import updateFileLocation +from fileOperations import renameAsSudo + +def something(SIPDirectory, accessDirectory, objectsDirectory, DIPDirectory, SIPUUID, date, copy=False): + #exitCode = 435 + exitCode = 179 + print SIPDirectory + #For every file, & directory Try to find the matching file & directory in the objects directory + for (path, dirs, files) in os.walk(accessDirectory): + for file in files: + accessPath = os.path.join(path, file) + objectPath = accessPath.replace(accessDirectory, objectsDirectory, 1) + objectName = os.path.basename(objectPath) + objectNameExtensionIndex = objectName.rfind(".") + + if objectNameExtensionIndex != -1: + objectName = objectName[:objectNameExtensionIndex + 1] + objectNameLike = os.path.join( os.path.dirname(objectPath), objectName).replace(SIPDirectory, "%SIPDirectory%", 1) + #sql = "SELECT fileUUID, currentLocation FROM Files WHERE currentLocation LIKE '%s%' AND removedTime = 0 AND SIPUUID = '%s'" % (objectNameLike, SIPUUID) + #ValueError: unsupported format character ''' (0x27) at index 76 + sql = "SELECT fileUUID, currentLocation FROM Files WHERE currentLocation LIKE '" + objectNameLike + "%' AND removedTime = 0 AND SIPUUID = '"+ SIPUUID + "'" + c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + if not row: + print >>sys.stderr, "No corresponding object for:", accessPath.replace(SIPDirectory, "%SIPDirectory%", 1) + exitCode = 1 + update = [] + while row != None: + objectUUID = row[0] + objectPath = row[1] + objectExtension = objectPath.replace(objectNameLike, "", 1) + print objectName[objectNameExtensionIndex + 1:], objectExtension, "\t", + if objectExtension.find(".") != -1: + print + row = c.fetchone() + continue + print 
objectName[objectNameExtensionIndex + 1:], objectExtension, "\t", + print row + dipPath = os.path.join(DIPDirectory, "objects", "%s-%s" % (objectUUID, os.path.basename(accessPath))) + if copy: + print "TODO - copy not supported yet" + else: + # + dest = dipPath + renameAsSudo(accessPath, dest) + + src = accessPath.replace(SIPDirectory, "%SIPDirectory%") + dst = dest.replace(SIPDirectory, "%SIPDirectory%") + update.append((src, dst)) + + # + row = c.fetchone() + sqlLock.release() + for src, dst in update: + eventDetail = "" + eventOutcomeDetailNote = "moved from=\"" + src + "\"; moved to=\"" + dst + "\"" + updateFileLocation(src, dst, "movement", date, eventDetail, sipUUID=SIPUUID, eventOutcomeDetailNote = eventOutcomeDetailNote) + return exitCode + + + +if __name__ == '__main__': + parser = OptionParser() + #'--SIPDirectory "%SIPDirectory%" --accessDirectory "objects/access/" --objectsDirectory "objects" --DIPDirectory "DIP" -c' + parser.add_option("-s", "--SIPDirectory", action="store", dest="SIPDirectory", default="") + parser.add_option("-u", "--SIPUUID", action="store", dest="SIPUUID", default="") + parser.add_option("-a", "--accessDirectory", action="store", dest="accessDirectory", default="") + parser.add_option("-o", "--objectsDirectory", action="store", dest="objectsDirectory", default="") + parser.add_option("-d", "--DIPDirectory", action="store", dest="DIPDirectory", default="") + parser.add_option("-t", "--date", action="store", dest="date", default="") + parser.add_option('-c', '--copy', dest='copy', action='store_true') + + (opts, args) = parser.parse_args() + + SIPDirectory = opts.SIPDirectory + accessDirectory = os.path.join(SIPDirectory, opts.accessDirectory) + objectsDirectory = os.path.join(SIPDirectory, opts.objectsDirectory) + DIPDirectory = os.path.join(SIPDirectory, opts.DIPDirectory) + SIPUUID = opts.SIPUUID + date = opts.date + copy = opts.copy + + if not os.path.isdir(accessDirectory): + print "no access directory in this sip" + exit(0) + + 
+ try: + if not os.path.isdir(DIPDirectory): + os.mkdir(DIPDirectory) + if not os.path.isdir(os.path.join(DIPDirectory, "objects")): + os.mkdir(os.path.join(DIPDirectory, "objects")) + except: + print "error creating DIP directory" + + exitCode = something(SIPDirectory, accessDirectory, objectsDirectory, DIPDirectory, SIPUUID, date, copy) + exit(exitCode) diff --git a/src/MCPClient/lib/clientScripts/checkForServiceDirectory.py b/src/MCPClient/lib/clientScripts/checkForServiceDirectory.py new file mode 100755 index 0000000000..e157beb193 --- /dev/null +++ b/src/MCPClient/lib/clientScripts/checkForServiceDirectory.py @@ -0,0 +1,113 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . 
+ +# @package Archivematica +# @subpackage archivematicaClientScript +# @author Joseph Perry +# @version svn: $Id$ + +import os +import sys +import uuid +from optparse import OptionParser +import re +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +import databaseInterface +from databaseFunctions import insertIntoDerivations + + +def something(SIPDirectory, serviceDirectory, objectsDirectory, SIPUUID, date): + #exitCode = 435 + exitCode = 0 + print SIPDirectory + #For every file, & directory Try to find the matching file & directory in the objects directory + for (path, dirs, files) in os.walk(serviceDirectory): + for file in files: + servicePreExtension = "_me" + originalPreExtension = "_m" + file1Full = os.path.join(path, file).replace(SIPDirectory, "%SIPDirectory%", 1) #service + + a = file.rfind(servicePreExtension + ".") + file2 = "" + if a != -1: + file2Full = os.path.join(path, file[:a] + originalPreExtension + ".").replace(SIPDirectory + "objects/service/", "%SIPDirectory%objects/", 1) #service + else: + a = file.rfind(".") + if a != -1: #if a period is found + a += 1 #include the period + file2Full = os.path.join(path, file[:a]).replace(SIPDirectory + "objects/service/", "%SIPDirectory%objects/", 1) #service + accessPath = os.path.join(path, file) + sql = "UPDATE Files SET fileGrpUse='service' WHERE currentLocation = '" + file1Full + "' AND removedTime = 0 AND SIPUUID = '"+ SIPUUID + "'" + rows = databaseInterface.runSQL(sql) + sql = "UPDATE Files SET fileGrpUUID= (SELECT fileUUID FROM (SELECT * FROM Files WHERE removedTime = 0 AND SIPUUID = '"+ SIPUUID + "') AS F WHERE currentLocation LIKE '" + file2Full + "%') WHERE currentLocation = '" + file1Full + "' AND removedTime = 0 AND SIPUUID = '"+ SIPUUID + "'" + print sql + rows = databaseInterface.runSQL(sql) + return exitCode + + +#only works if files have the same extension +def regular(SIPDirectory, objectsDirectory, SIPUUID, date): + searchForRegularExpressions = True + if not 
searchForRegularExpressions: + return + original = "" + service = "" + + + for (path, dirs, files) in os.walk(objectsDirectory): + for file in files: + m = re.search("_me\.[a-zA-Z0-9]{2,4}$", file) + if m != None: + file1Full = os.path.join(path, file).replace(SIPDirectory, "%SIPDirectory%", 1) #service + file2 = file.replace(m.group(0), m.group(0).replace("_me", "_m", 1)) + file2Full = os.path.join(path, file2).replace(SIPDirectory, "%SIPDirectory%", 1) #original + accessPath = os.path.join(path, file) + sql = "UPDATE Files SET fileGrpUse='service' WHERE currentLocation = '" + file1Full + "' AND removedTime = 0 AND SIPUUID = '"+ SIPUUID + "'" + rows = databaseInterface.runSQL(sql) + sql = "UPDATE Files SET fileGrpUUID= (SELECT fileUUID FROM (SELECT * FROM Files WHERE removedTime = 0 AND SIPUUID = '"+ SIPUUID + "') AS F WHERE currentLocation = '" + file2Full + "') WHERE currentLocation = '" + file1Full + "' AND removedTime = 0 AND SIPUUID = '"+ SIPUUID + "'" + rows = databaseInterface.runSQL(sql) + + +if __name__ == '__main__': + while False: + import time + time.sleep(10) + parser = OptionParser() + #'--SIPDirectory "%SIPDirectory%" --serviceDirectory "objects/service/" --objectsDirectory "objects/" --SIPUUID "%SIPUUID%" --date "%date%"' ); + parser.add_option("-s", "--SIPDirectory", action="store", dest="SIPDirectory", default="") + parser.add_option("-u", "--SIPUUID", action="store", dest="SIPUUID", default="") + parser.add_option("-a", "--serviceDirectory", action="store", dest="serviceDirectory", default="") + parser.add_option("-o", "--objectsDirectory", action="store", dest="objectsDirectory", default="") + parser.add_option("-t", "--date", action="store", dest="date", default="") + + (opts, args) = parser.parse_args() + + SIPDirectory = opts.SIPDirectory + serviceDirectory = os.path.join(SIPDirectory, opts.serviceDirectory) + objectsDirectory = os.path.join(SIPDirectory, opts.objectsDirectory) + SIPUUID = opts.SIPUUID + date = opts.date + + if not 
os.path.isdir(serviceDirectory): + print "no service directory in this sip" + #regular(SIPDirectory, objectsDirectory, SIPUUID, date) + exit(0) + + exitCode = something(SIPDirectory, serviceDirectory, objectsDirectory, SIPUUID, date) + exit(exitCode) diff --git a/src/MCPClient/lib/clientScripts/checkForSubmissionDocumenation.py b/src/MCPClient/lib/clientScripts/checkForSubmissionDocumenation.py new file mode 100755 index 0000000000..4b229c4b89 --- /dev/null +++ b/src/MCPClient/lib/clientScripts/checkForSubmissionDocumenation.py @@ -0,0 +1,39 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . 
+ +# @package Archivematica +# @subpackage archivematicaClientScript +# @author Joseph Perry +# @version svn: $Id$ + +import os +import sys +target = sys.argv[1] +if not os.path.isdir(target): + print >>sys.stderr, "Directory doesn't exist: ", target + os.mkdir(target) +if os.listdir(target) == []: + print >>sys.stderr, "Directory is empty: ", target + fileName = os.path.join(target, "submissionDocumentation.log") + f = open(fileName, 'a') + f.write("No submission documentation added") + f.close() + os.chmod(fileName, 488) +else: + exit(0) diff --git a/src/MCPClient/lib/clientScripts/checkTransferDirectoryForObjects.py b/src/MCPClient/lib/clientScripts/checkTransferDirectoryForObjects.py new file mode 100755 index 0000000000..170adaf56c --- /dev/null +++ b/src/MCPClient/lib/clientScripts/checkTransferDirectoryForObjects.py @@ -0,0 +1,36 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . 
+ +# @package Archivematica +# @subpackage archivematicaClientScript +# @author Joseph Perry +# @version svn: $Id$ +import os +import sys +exitInidcatingThereAreObjects = 179 + +if __name__ == '__main__': + objectsDir = sys.argv[1] + os.path.isdir(objectsDir) + ret = 0 + for dirs, subDirs, files in os.walk(objectsDir): + if files != None and files != []: + ret = exitInidcatingThereAreObjects + break + exit(ret) diff --git a/src/MCPClient/lib/clientScripts/copyTransferSubmissionDocumentation.py b/src/MCPClient/lib/clientScripts/copyTransferSubmissionDocumentation.py new file mode 100755 index 0000000000..0b96b01cbd --- /dev/null +++ b/src/MCPClient/lib/clientScripts/copyTransferSubmissionDocumentation.py @@ -0,0 +1,50 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . 
+ +# @package Archivematica +# @subpackage archivematicaClientScript +# @author Joseph Perry +# @version svn: $Id$ + +import os +import sys +import shutil +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +import databaseInterface + + +if __name__ == '__main__': + sipUUID = sys.argv[1] + submissionDocumentationDirectory = sys.argv[2] + sharedPath = sys.argv[3] + + sql = """SELECT Transfers.currentLocation FROM Transfers WHERE Transfers.transferUUID IN (SELECT transferUUID FROM Files WHERE removedTime = 0 AND sipUUID = '%s');""" % (sipUUID) + print sql + c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + while row != None: + #print row + transferLocation = row[0].replace("%sharedPath%", sharedPath) + transferNameUUID = os.path.basename(os.path.abspath(transferLocation)) + src = os.path.join(transferLocation, "metadata/submissionDocumentation") + dst = os.path.join(submissionDocumentationDirectory, "transfer-%s" % (transferNameUUID)) + print >>sys.stderr, src, " -> ", dst + shutil.copytree(src, dst) + row = c.fetchone() + sqlLock.release() diff --git a/src/MCPClient/lib/clientScripts/copyTransfersMetadataAndLogs.py b/src/MCPClient/lib/clientScripts/copyTransfersMetadataAndLogs.py new file mode 100755 index 0000000000..d12b5e5942 --- /dev/null +++ b/src/MCPClient/lib/clientScripts/copyTransfersMetadataAndLogs.py @@ -0,0 +1,80 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . + +# @package Archivematica +# @subpackage archivematicaClientScript +# @author Joseph Perry +# @version svn: $Id$ +import os +import sys +import shutil +from optparse import OptionParser +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +import databaseInterface + + +def main(sipUUID, transfersDirectory, sharedPath=""): + if not os.path.exists(transfersDirectory): + os.makedirs(transfersDirectory) + + exitCode = 0 + sql = """SELECT Files.transferUUID, Transfers.currentLocation FROM Files + JOIN Transfers on Transfers.transferUUID = Files.transferUUID + WHERE sipUUID = '%s' + GROUP BY Files.transferUUID;""" % (sipUUID) + + c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + while row != None: + try: + transferUUID = row[0] + transferPath = row[1] + if sharedPath != "": + transferPath = transferPath.replace("%sharedPath%", sharedPath, 1) + transferBasename = os.path.basename(os.path.abspath(transferPath)) + transferMetaDestDir = os.path.join(transfersDirectory, transferBasename) + if not os.path.exists(transferMetaDestDir): + os.makedirs(transferMetaDestDir) + shutil.copytree(transferPath + "logs", os.path.join(transferMetaDestDir, "logs")) + print "copied: ", transferPath + "logs", " -> ", os.path.join(transferMetaDestDir, "logs") + shutil.copytree(transferPath + "metadata", os.path.join(transferMetaDestDir, "metadata")) + print "copied: ", transferPath + "metadata", " -> ", os.path.join(transferMetaDestDir, "metadata") + + except Exception as inst: + print >>sys.stderr, type(inst) + print >>sys.stderr, inst.args + print >>sys.stderr, "Error with transfer: ", row + exitCode += 1 + row = c.fetchone() + + sqlLock.release() + exit(exitCode) + +if __name__ == '__main__': + while False: #used to stall the mcp and stop the client for testing this module + import time + 
time.sleep(10) + parser = OptionParser() + parser.add_option("-s", "--sipDirectory", action="store", dest="sipDirectory", default="") + parser.add_option("-S", "--sipUUID", action="store", dest="sipUUID", default="") + parser.add_option("-p", "--sharedPath", action="store", dest="sharedPath", default="/var/archivematica/sharedDirectory/") + (opts, args) = parser.parse_args() + + + main(opts.sipUUID, opts.sipDirectory+"metadata/transfers/", sharedPath=opts.sharedPath) diff --git a/src/MCPClient/lib/clientScripts/createEvent.py b/src/MCPClient/lib/clientScripts/createEvent.py new file mode 100755 index 0000000000..54ba576e8a --- /dev/null +++ b/src/MCPClient/lib/clientScripts/createEvent.py @@ -0,0 +1,49 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . 
# @package Archivematica
# @subpackage archivematicaClientScript
# @author Joseph Perry
# @version svn: $Id$
from optparse import OptionParser
import sys
sys.path.append("/usr/lib/archivematica/archivematicaCommon")
from databaseFunctions import insertIntoEvents


if __name__ == '__main__':
    # Thin CLI wrapper: record one PREMIS event for a single file.
    parser = OptionParser()
    # (short flag, long flag, destination attribute) -- all default to "".
    optionSpecs = [
        ("-i", "--fileUUID", "fileUUID"),
        ("-t", "--eventType", "eventType"),
        ("-d", "--eventDateTime", "eventDateTime"),
        ("-e", "--eventDetail", "eventDetail"),
        ("-o", "--eventOutcome", "eventOutcome"),
        ("-n", "--eventOutcomeDetailNote", "eventOutcomeDetailNote"),
        ("-u", "--eventIdentifierUUID", "eventIdentifierUUID"),
    ]
    for shortFlag, longFlag, destName in optionSpecs:
        parser.add_option(shortFlag, longFlag, action="store", dest=destName, default="")

    (opts, args) = parser.parse_args()

    insertIntoEvents(
        fileUUID=opts.fileUUID,
        eventIdentifierUUID=opts.eventIdentifierUUID,
        eventType=opts.eventType,
        eventDateTime=opts.eventDateTime,
        eventDetail=opts.eventDetail,
        eventOutcome=opts.eventOutcome,
        eventOutcomeDetailNote=opts.eventOutcomeDetailNote)
# @package Archivematica
# @subpackage archivematicaClientScript
# @author Joseph Perry
# @version svn: $Id$
from optparse import OptionParser
import sys
sys.path.append("/usr/lib/archivematica/archivematicaCommon")
from databaseFunctions import insertIntoEvents
import databaseInterface


if __name__ == '__main__':
    """creates events for all files in the group"""
    parser = OptionParser()
    parser.add_option("-i", "--groupUUID", action="store", dest="groupUUID", default="")
    parser.add_option("-g", "--groupType", action="store", dest="groupType", default="")
    parser.add_option("-t", "--eventType", action="store", dest="eventType", default="")
    parser.add_option("-d", "--eventDateTime", action="store", dest="eventDateTime", default="")
    parser.add_option("-e", "--eventDetail", action="store", dest="eventDetail", default="")
    parser.add_option("-o", "--eventOutcome", action="store", dest="eventOutcome", default="")
    parser.add_option("-n", "--eventOutcomeDetailNote", action="store", dest="eventOutcomeDetailNote", default="")
    parser.add_option("-u", "--eventIdentifierUUID", action="store", dest="eventIdentifierUUID", default="")

    (opts, args) = parser.parse_args()

    # groupType names the Files column to match on (presumably
    # transferUUID or sipUUID -- confirm against callers).
    # NOTE(review): both groupType and groupUUID are interpolated directly
    # into the SQL string. The values come from the MCP server, but this
    # is still injection-prone; consider validating groupType against a
    # whitelist of column names.
    sql = """SELECT fileUUID FROM Files WHERE removedTime = 0 AND %s = '%s';""" % (opts.groupType, opts.groupUUID)
    for row in databaseInterface.queryAllSQL(sql):
        # One identical event per non-removed file in the group.
        insertIntoEvents(fileUUID=row[0],
                         eventIdentifierUUID=opts.eventIdentifierUUID,
                         eventType=opts.eventType,
                         eventDateTime=opts.eventDateTime,
                         eventDetail=opts.eventDetail,
                         eventOutcome=opts.eventOutcome,
                         eventOutcomeDetailNote=opts.eventOutcomeDetailNote)
# @package Archivematica
# @subpackage archivematicaClientScript
# @author Joseph Perry
# @version svn: $Id$
import uuid
import shutil
import MySQLdb
import os
import sys
sys.path.append("/usr/lib/archivematica/archivematicaCommon")
import databaseInterface
import databaseFunctions
from archivematicaCreateStructuredDirectory import createStructuredDirectory


if __name__ == '__main__':
    # Command line: <objectsDirectory> <transferName> <transferUUID>
    #               <processingDirectory> <autoProcessSIPDirectory> <sharedPath>
    objectsDirectory = sys.argv[1]
    transferName = sys.argv[2]
    transferUUID = sys.argv[3]
    processingDirectory = sys.argv[4]
    autoProcessSIPDirectory = sys.argv[5]
    sharedPath = sys.argv[6]
    sipName = transferName
    sipUUID = str(uuid.uuid4())

    # Build the SIP skeleton in the processing directory, but register it
    # in the database at its final (auto-process) location.
    tmpSIPDir = os.path.join(processingDirectory, sipName) + "/"
    destSIPDir = os.path.join(autoProcessSIPDirectory, sipName) + "/"
    createStructuredDirectory(tmpSIPDir)
    databaseFunctions.createSIP(destSIPDir.replace(sharedPath, '%sharedPath%'), sipUUID)

    # Move the transfer's objects into the SIP.
    for item in os.listdir(objectsDirectory):
        shutil.move(os.path.join(objectsDirectory, item), os.path.join(tmpSIPDir, "objects", item))

    # For each database file record under the transfer's objects
    # directory, confirm the file actually arrived in the SIP, then
    # re-home the record (currentLocation and owning SIP).
    # BUG FIX: transferUUID was concatenated into the SQL unescaped while
    # the UPDATE below escaped its values; escape it here as well.
    sql = """SELECT fileUUID, currentLocation FROM Files WHERE removedTime = 0 AND currentLocation LIKE '\%transferDirectory\%objects%' AND transferUUID = '""" + MySQLdb.escape_string(transferUUID) + "'"
    for row in databaseInterface.queryAllSQL(sql):
        fileUUID = row[0]
        currentPath = databaseFunctions.deUnicode(row[1])
        currentSIPFilePath = currentPath.replace("%transferDirectory%", tmpSIPDir)
        if os.path.isfile(currentSIPFilePath):
            sql = """UPDATE Files SET currentLocation='%s', sipUUID='%s' WHERE fileUUID='%s'""" % (MySQLdb.escape_string(currentPath.replace("%transferDirectory%", "%SIPDirectory%")), sipUUID, fileUUID)
            databaseInterface.runSQL(sql)
        else:
            print >>sys.stderr, "file not found: ", currentSIPFilePath

    # Hand the finished SIP over to the auto-process watch directory.
    shutil.move(tmpSIPDir, destSIPDir)
+ +# @package Archivematica +# @subpackage archivematicaClientScript +# @author Joseph Perry +# @version svn: $Id$ + +import lxml.etree as etree +import ConfigParser + +config2 = ConfigParser.SafeConfigParser() +config2.read("/etc/archivematica/MCPClient/clientConfig.conf") + +config = ConfigParser.SafeConfigParser() +config.read(config2.get('MCPClient', "sharedDirectoryMounted") + "sharedMicroServiceTasksConfigs/createXmlEventsAssist/organization.ini") + + +yourAgentIdentifierType=config.get('organization', "yourAgentIdentifierType") +yourAgentIdentifierValue=config.get('organization', "yourAgentIdentifierValue") +yourAgentName=config.get('organization', "yourAgentName") +yourAgentType=config.get('organization', "yourAgentType") + +organizationEvents = ["receive SIP", "SIP review", "appraise SIP"] + +def createLinkingAgentIdentifierAssist(linkingAgentIdentifierType, linkingAgentIdentifierValue): + ret = etree.Element("linkingAgentIdentifier") + etree.SubElement(ret, "linkingAgentIdentifierType").text = linkingAgentIdentifierType + etree.SubElement(ret, "linkingAgentIdentifierValue").text = linkingAgentIdentifierValue + return ret + +def createArchivematicaLinkingAgentIdentifier(): + return createLinkingAgentIdentifierAssist("preservation system", "Archivematica-0.7") + +def createOrgLinkingAgentIdentifier(): + return createLinkingAgentIdentifierAssist(yourAgentIdentifierType, yourAgentIdentifierValue) + +def createAgent(agentIdentifierType, agentIdentifierValue, agentName, agentType): + ret = etree.Element("agent") + agentIdentifier = etree.SubElement( ret, "agentIdentifier") + etree.SubElement( agentIdentifier, "agentIdentifierType").text = agentIdentifierType + etree.SubElement( agentIdentifier, "agentIdentifierValue").text = agentIdentifierValue + etree.SubElement( ret, "agentName").text = agentName + etree.SubElement( ret, "agentType").text = agentType + return ret + + +def createArchivematicaAgent(): + return createAgent("preservation system", 
"Archivematica-0.7", "Archivematica", "software") + +def createOrganizationAgent(): + return createAgent(yourAgentIdentifierType, yourAgentIdentifierValue, yourAgentName, yourAgentType) + +def createLinkingAgentIdentifier(eType): + if eType in organizationEvents: + return createOrgLinkingAgentIdentifier() + else: + return createArchivematicaLinkingAgentIdentifier() + + +def createOutcomeInformation( eventOutcomeDetailNote = None, eventOutcomeText = None): + ret = etree.Element("eventOutcomeInformation") + etree.SubElement(ret, "eventOutcome").text = eventOutcomeText + eventOutcomeDetail = etree.SubElement(ret, "eventOutcomeDetail") + etree.SubElement(eventOutcomeDetail, "eventOutcomeDetailNote").text = eventOutcomeDetailNote + return ret + +def createEvent( eIDValue, eType, eIDType="UUID", \ +eventDateTime = "now", \ +eventDetailText = "", \ +eOutcomeInformation = createOutcomeInformation(), \ +linkingAgentIdentifier = None): + ret = etree.Element("event") + eventIdentifier = etree.SubElement(ret, "eventIdentifier") + etree.SubElement(eventIdentifier, "eventIdentifierType").text = eIDType + etree.SubElement(eventIdentifier, "eventIdentifierValue").text = eIDValue + etree.SubElement(ret, "eventType").text = eType + etree.SubElement(ret, "eventDateTime").text = eventDateTime + eDetail = etree.SubElement(ret, "eventDetail") + eDetail.text = eventDetailText + if eOutcomeInformation != None: + ret.append(eOutcomeInformation) + if not linkingAgentIdentifier: + linkingAgentIdentifier = createLinkingAgentIdentifier(eType) + ret.append(linkingAgentIdentifier) + return ret + +if __name__ == '__main__': + print "This is a support file." + print "testing..." 
+ event = createEvent("test", "test") + print etree.tostring(event, pretty_print=True) diff --git a/src/MCPClient/lib/clientScripts/elasticSearchIndexProcessAIP.py b/src/MCPClient/lib/clientScripts/elasticSearchIndexProcessAIP.py new file mode 100755 index 0000000000..eeb9b46a25 --- /dev/null +++ b/src/MCPClient/lib/clientScripts/elasticSearchIndexProcessAIP.py @@ -0,0 +1,52 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2011 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 2 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . + +# @package Archivematica +# @subpackage archivematicaClientScript +# @author Mike Cantelon +# @version svn: $Id$ +import sys, os, time, ConfigParser +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +sys.path.append("/usr/lib/archivematica/archivematicaCommon/externals") +import elasticSearchFunctions + +exitCode = 0 + +if __name__ == '__main__': + clientConfigFilePath = '/etc/archivematica/MCPClient/clientConfig.conf' + config = ConfigParser.SafeConfigParser() + config.read(clientConfigFilePath) + + elasticsearchDisabled = False + + try: + elasticsearchDisabled = config.getboolean('MCPClient', "disableElasticsearchIndexing") + except: + pass + + if elasticsearchDisabled is True: + print 'Skipping indexing: indexing is currently disabled in ' + clientConfigFilePath + '.' 
+ + else: + pathToAIP = sys.argv[1] + uuid = sys.argv[2] + + exitCode = elasticSearchFunctions.connect_and_index('aips', 'aip', uuid, pathToAIP) + +quit(exitCode) diff --git a/src/MCPClient/lib/clientScripts/elasticSearchIndexProcessTransfer.py b/src/MCPClient/lib/clientScripts/elasticSearchIndexProcessTransfer.py new file mode 100755 index 0000000000..346a985d2a --- /dev/null +++ b/src/MCPClient/lib/clientScripts/elasticSearchIndexProcessTransfer.py @@ -0,0 +1,52 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2011 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 2 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . 
+ +# @package Archivematica +# @subpackage archivematicaClientScript +# @author Mike Cantelon +# @version svn: $Id$ +import sys, os, time, ConfigParser +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +sys.path.append("/usr/lib/archivematica/archivematicaCommon/externals") +import elasticSearchFunctions + +exitCode = 0 + +if __name__ == '__main__': + clientConfigFilePath = '/etc/archivematica/MCPClient/clientConfig.conf' + config = ConfigParser.SafeConfigParser() + config.read(clientConfigFilePath) + + elasticsearchDisabled = False + + try: + elasticsearchDisabled = config.getboolean('MCPClient', "disableElasticsearchIndexing") + except: + pass + + if elasticsearchDisabled is True: + print 'Skipping indexing: indexing is currently disabled in ' + clientConfigFilePath + '.' + + else: + pathToTransfer = sys.argv[1] + 'objects' + transferUUID = sys.argv[2] + + exitCode = elasticSearchFunctions.connect_and_index('transfers', 'transfer', transferUUID, pathToTransfer) + +quit(exitCode) diff --git a/src/MCPClient/lib/clientScripts/extractBAG.sh b/src/MCPClient/lib/clientScripts/extractBAG.sh new file mode 100755 index 0000000000..f89cd8c4c7 --- /dev/null +++ b/src/MCPClient/lib/clientScripts/extractBAG.sh @@ -0,0 +1,45 @@ +#!/bin/bash + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . 
# @package Archivematica
# @subpackage archivematicaClientScript
# @author Joseph Perry
# @version svn: $Id$
#source /etc/archivematica/archivematicaConfig.conf
set -e
DIRNAME="`dirname "$1"`"
sudo chown -R archivematica:archivematica "$1"
sudo chmod 777 -R "$1"
exitCode=0
# Extract every zip found under $1 into its parent directory, fix
# ownership/permissions, remove each zip, and finally remove $1 itself.
#
# BUG FIXES vs. the original:
#  - the read variable was named PATH, clobbering the shell's command
#    search path for the duration of the loop body;
#  - the "find | while" pipeline ran the loop in a subshell, so every
#    exitCode update was discarded and "exit $exitCode" always returned
#    0.  Feeding the loop via process substitution keeps it in the
#    current shell.
while IFS=":" read -r zipName zipDir
do
    extractedDirectory="${DIRNAME}"
    echo Extracting to: "$extractedDirectory" 1>&2
    /usr/bin/7z x -bd -o"${extractedDirectory}" "${1}${zipName}"
    exitCode=$(($exitCode + $? ))
    /usr/bin/sudo /bin/chown -R archivematica:archivematica "${extractedDirectory}"
    /usr/bin/sudo /bin/chmod -R 770 "${extractedDirectory}"
    /usr/bin/sudo /bin/chmod 777 "${1}${zipName}"
    /bin/rm "${1}${zipName}"
done < <(find "${1}" -name "*.zip" -printf "%f:%h\n")
/bin/rm "${1}" -r
exit $exitCode
+ +# @package Archivematica +# @subpackage archivematicaClientScript +# @author Joseph Perry +# @version svn: $Id$ +import shutil +import os +import sys +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +from executeOrRunSubProcess import executeOrRun +import databaseInterface + + +def extract(target, destinationDirectory): + command = """/usr/bin/7z x -bd -o"%s" "%s" """ % (destinationDirectory, target) + exitC, stdOut, stdErr = executeOrRun("command", command, printing=False) + if exitC != 0: + print stdOut + print >>sys.stderr, "Failed extraction: ", command, "\r\n", stdErr + exit(exitC) + + + + +if __name__ == '__main__': + target = sys.argv[1] + transferUUID = sys.argv[2] + processingDirectory = sys.argv[3] + sharedPath = sys.argv[4] + + basename = os.path.basename(target) + basename = basename[:basename.rfind(".")] + + destinationDirectory = os.path.join(processingDirectory, basename) + zipLocation = os.path.join(processingDirectory, os.path.basename(target)) + + #move to processing directory + shutil.move(target, zipLocation) + + #extract + extract(zipLocation, destinationDirectory) + + #checkForTopLevelBag + listdir = os.listdir(destinationDirectory) + if len(listdir) == 1: + internalBagName = listdir[0] + #print "ignoring BagIt internal name: ", internalBagName + temp = destinationDirectory + "-tmp" + shutil.move(destinationDirectory, temp) + #destinationDirectory = os.path.join(processingDirectory, internalBagName) + shutil.move(os.path.join(temp, internalBagName), destinationDirectory) + os.rmdir(temp) + + #update transfer + destinationDirectoryDB = destinationDirectory.replace(sharedPath, "%sharedPath%", 1) + sql = """UPDATE Transfers SET currentLocation = '%s' WHERE transferUUID = '%s';""" % (destinationDirectoryDB, transferUUID) + databaseInterface.runSQL(sql) + + #remove bag + os.remove(zipLocation) + + + + diff --git a/src/MCPClient/lib/clientScripts/extractMaildirAttachments.py 
b/src/MCPClient/lib/clientScripts/extractMaildirAttachments.py new file mode 100755 index 0000000000..0517257f92 --- /dev/null +++ b/src/MCPClient/lib/clientScripts/extractMaildirAttachments.py @@ -0,0 +1,177 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . + +# @package Archivematica +# @subpackage archivematicaClientScript +# @author Joseph Perry +# @version svn: $Id$ + +import mailbox +import sys +import os +import uuid +import traceback +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +from externals.extractMaildirAttachments import parse +from fileOperations import addFileToTransfer +from fileOperations import updateSizeAndChecksum +import databaseInterface + +def writeFile(filePath, fileContents): + try: + os.makedirs(os.path.dirname(filePath)) + except: + pass + print filePath + FILE = open(filePath,"w") + FILE.writelines(fileContents) + FILE.close() + +def addFile(filePath, transferPath, transferUUID, date, eventDetail = "", fileUUID = uuid.uuid4().__str__()): + taskUUID = uuid.uuid4().__str__() + filePathRelativeToSIP = filePath.replace(transferPath, "%transferDirectory%", 1) + addFileToTransfer(filePathRelativeToSIP, fileUUID, transferUUID, taskUUID, date, sourceType="unpacking", eventDetail=eventDetail) + updateSizeAndChecksum(fileUUID, filePath, date, 
uuid.uuid4.__str__()) + +def getFileUUIDofSourceFile(transferUUID, sourceFilePath): + ret = "" + sql = """SELECT fileUUID FROM Files WHERE removedTime = 0 AND transferUUID = '%s' AND currentLocation LIKE '%s%%';""" % (transferUUID, sourceFilePath.replace('%', '\%')) + rows = databaseInterface.queryAllSQL(sql) + if len(rows): + ret = rows[0][0] + return ret + +def setSourceFileToBeExcludedFromDIP(sourceFileUUID): + sql = """INSERT INTO FilesIdentifiedIDs (fileUUID, fileID) VALUES ('%s', (SELECT pk FROM FileIDs WHERE description = 'A maildir email file')); """ % (sourceFileUUID) + databaseInterface.runSQL(sql) + +def addKeyFileToNormalizeMaildirOffOf(relativePathToRepresent, mirrorDir, transferPath, transferUUID, date, eventDetail = "", fileUUID=uuid.uuid4().__str__()): + basename = os.path.basename(mirrorDir) + dirname = os.path.dirname(mirrorDir) + outFile = os.path.join(dirname, basename + ".archivematicaMaildir") + content = """#This file is used in the archivematica system to represent a maildir dirctory, for normalization and permission purposes. 
+[archivematicaMaildir] +path = %s + """ % (relativePathToRepresent) + f = open(outFile, 'w') + f.write(content) + f.close() + addFile(outFile, transferPath, transferUUID, date, eventDetail=eventDetail, fileUUID=fileUUID) + return + +if __name__ == '__main__': + #http://www.doughellmann.com/PyMOTW/mailbox/ + while False: #used to stall the mcp and stop the client for testing this module + import time + time.sleep(10) + global errorCounter + errorCounter = 0 + transferDir = sys.argv[1] + transferUUID = sys.argv[2] + date = sys.argv[3] + maildir = transferDir + "objects/Maildir/" + outXML = transferDir + "logs/attachmentExtraction.xml" + mirrorDir = os.path.join(transferDir, "objects/attachments") + try: + os.makedirs(mirrorDir) + except: + pass + import lxml.etree as etree + #print "Extracting attachments from: " + maildir + root = etree.Element("ArchivematicaMaildirAttachmentExtractionRecord") + root.set("directory", maildir) + for maildirsub2 in os.listdir(maildir): + maildirsub = os.path.join(maildir, maildirsub2) + #print "Extracting attachments from: " + maildirsub + md = mailbox.Maildir(maildirsub, None) + directory = etree.SubElement(root, "subDir") + directory.set("dir", maildirsub2) + try: + for item in md.iterkeys(): + try: + subDir = md.get_message(item).get_subdir() + sourceFilePath2 = os.path.join(maildir, maildirsub2, subDir, item) + sourceFilePath = sourceFilePath2.replace(transferDir, "%transferDirectory%", 1) + fil = md.get_file(item) + out = parse(fil) + sourceFileUUID = getFileUUIDofSourceFile(transferUUID, sourceFilePath) + setSourceFileToBeExcludedFromDIP(sourceFileUUID) + if len(out['attachments']): + msg = etree.SubElement(directory, "msg") + etree.SubElement(msg, "Message-ID").text = out['msgobj']['Message-ID'][1:-1] + etree.SubElement(msg, "Extracted-from").text = item + etree.SubElement(msg, "Subject").text = out["subject"] + etree.SubElement(msg, "Date").text = out['msgobj']['date'] + etree.SubElement(msg, "To").text = out["to"] + 
etree.SubElement(msg, "From").text = out["from"] + for i in range(len(out['attachments'])): + try: + attachment = out['attachments'][i] + if attachment.name == None: + continue + attachedFileUUID = uuid.uuid4().__str__() + #attachment = StringIO(file_data) TODO LOG TO FILE + attch = etree.SubElement(msg, "attachment") + #attachment.name = attachment.name[1:-1] + etree.SubElement(attch, "name").text = attachment.name + etree.SubElement(attch, "content_type").text = attachment.content_type + etree.SubElement(attch, "size").text = str(attachment.size) + #print attachment.create_date + # Dates don't appear to be working. Disabling for the moment - Todo + #etree.SubElement(attch, "create_date").text = attachment.create_date + #etree.SubElement(attch, "mod_date").text = attachment.mod_date + #etree.SubElement(attch, "read_date").text = attachment.read_date + + filePath = os.path.join(transferDir, "objects/attachments", maildirsub2, subDir, "%s_%s" % (attachedFileUUID, attachment.name)) + writeFile(filePath, attachment) + eventDetail="Unpacked from: {%s}%s" % (sourceFileUUID, sourceFilePath) + addFile(filePath, transferDir, transferUUID, date, eventDetail=eventDetail, fileUUID=attachedFileUUID) + except Exception as inst: + print >>sys.stderr, sourceFilePath + traceback.print_exc(file=sys.stderr) + print >>sys.stderr, type(inst) # the exception instance + print >>sys.stderr, inst.args + print >>sys.stderr, etree.tostring(msg) + print >>sys.stderr + errorCounter += 1 + except Exception as inst: + print >>sys.stderr, sourceFilePath + traceback.print_exc(file=sys.stderr) + print >>sys.stderr, type(inst) # the exception instance + print >>sys.stderr, inst.args + print >>sys.stderr + errorCounter += 1 + except Exception as inst: + print >>sys.stderr, "INVALID MAILDIR FORMAT" + print >>sys.stderr, type(inst) + print >>sys.stderr, inst.args + exit(-10) + mirrorDir = os.path.join(transferDir, "objects/attachments", maildirsub2) + try: + os.makedirs(mirrorDir) + except: + pass + 
eventDetail = "added for normalization purposes" + fileUUID=uuid.uuid4().__str__() + addKeyFileToNormalizeMaildirOffOf(os.path.join(maildir, maildirsub2).replace(transferDir, "%transferDirectory%", 1), mirrorDir, transferDir, transferUUID, date, eventDetail=eventDetail, fileUUID=fileUUID) + tree = etree.ElementTree(root) + tree.write(outXML, pretty_print=True, xml_declaration=True) + exit(errorCounter) + + diff --git a/src/MCPClient/lib/clientScripts/getContentdmCollectionList.py b/src/MCPClient/lib/clientScripts/getContentdmCollectionList.py new file mode 100755 index 0000000000..dba83b1b42 --- /dev/null +++ b/src/MCPClient/lib/clientScripts/getContentdmCollectionList.py @@ -0,0 +1,46 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . + +# @package Archivematica +# @subpackage archivematicaClientScript +# @author Mark Jordan +# @version svn: $Id$ +import sys +import json +import urllib + +# The base URL will be specific to each CONTENTdm server; everything including and +# following 'dmwebservices' is the same. 
+try: + CollectionListUrl = 'http://%s/dmwebservices/index.php?q=dmGetCollectionList/json' % (sys.argv[1]) + f = urllib.urlopen(CollectionListUrl) + collectionListString = f.read() + collectionList = json.loads(collectionListString) +except: + print "Cannot retrieve CONTENTdm collection list from " + sys.argv[1] + sys.exit(1) + +# We only want two of the elements of each 'collection', alias and name. +cleanCollectionList = {} +for collection in collectionList: + for k, v in collection.iteritems(): + cleanCollectionList[collection['name']] = collection['alias'] + + +print(cleanCollectionList) diff --git a/src/MCPClient/lib/clientScripts/identifyDspaceLicenseFiles.py b/src/MCPClient/lib/clientScripts/identifyDspaceLicenseFiles.py new file mode 100755 index 0000000000..2c36fda3bc --- /dev/null +++ b/src/MCPClient/lib/clientScripts/identifyDspaceLicenseFiles.py @@ -0,0 +1,78 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . 
# @package Archivematica
# @subpackage archivematicaClientScript
# @author Joseph Perry
# @version svn: $Id$
import os
import sys
import lxml.etree as etree
import MySQLdb
sys.path.append("/usr/lib/archivematica/archivematicaCommon")
import databaseInterface

def verifyMetsFileSecChecksums(metsFile, date, taskUUID, transferDirectory, transferUUID, relativeDirectory="./"):
    # NOTE(review): despite its name (apparently copied from another client
    # script) this function does not verify checksums.  It scans the DSpace
    # METS fileSec for fileGrp USE="LICENSE" entries and marks the matching
    # rows in the Files table as licenses.  Returns 0 unconditionally.
    print metsFile
    DspaceLicenses = "metadata/submissionDocumentation/DspaceLicenses"
    # NOTE(review): the directory created here is never used by this script;
    # it looks copied from moveDspaceLicenseFilesToDSpaceLicenses.py — confirm.
    try:
        path = os.path.join(transferDirectory, DspaceLicenses)
        if not os.path.isdir(path):
            os.mkdir(path)
    except:
        # Best-effort: a failure to create the directory is reported but
        # does not abort METS processing.
        print "error creating DspaceLicenses directory."
    exitCode = 0
    tree = etree.parse(metsFile)
    root = tree.getroot()
    # Walk fileSec/fileGrp; only groups flagged USE="LICENSE" are of interest.
    for item in root.findall("{http://www.loc.gov/METS/}fileSec/{http://www.loc.gov/METS/}fileGrp"):
        USE = item.get("USE")
        if USE == "LICENSE":
            # fileGrp -> file -> FLocat; the xlink:href attribute carries the
            # file path relative to the METS file's directory.
            for item2 in item:
                if item2.tag == "{http://www.loc.gov/METS/}file":
                    for item3 in item2:
                        if item3.tag == "{http://www.loc.gov/METS/}FLocat":
                            fileLocation = item3.get("{http://www.w3.org/1999/xlink}href")
                            fileFullPath = os.path.join(relativeDirectory, fileLocation)
                            # The Files table stores locations with the transfer
                            # directory abstracted to the %transferDirectory% token.
                            dbLocation = fileFullPath.replace(transferDirectory, "%transferDirectory%")
                            # NOTE(review): transferUUID is interpolated without
                            # escaping — it comes from the MCP caller, but confirm
                            # it is always a UUID.
                            sql = """UPDATE Files SET fileGrpUse = 'license' WHERE currentLocation = '%s' AND transferUUID = '%s';""" % (MySQLdb.escape_string(dbLocation), transferUUID)
                            databaseInterface.runSQL(sql)
    return exitCode


if __name__ == '__main__':
    # argv: [1] METS file path, [2] date, [3] task UUID,
    #       [4] transfer directory, [5] transfer UUID
    metsFile = sys.argv[1]
    date = sys.argv[2]
    taskUUID = sys.argv[3]
    transferDirectory = sys.argv[4]
    transferUUID = sys.argv[5]

    # FLocat hrefs are relative to the directory containing the METS file.
    ret = verifyMetsFileSecChecksums(metsFile, date, taskUUID, transferDirectory, transferUUID, relativeDirectory=os.path.dirname(metsFile) + "/")
    quit(ret)
# @package Archivematica
# @subpackage archivematicaClientScript
# @author Joseph Perry
# @version svn: $Id$
import sys
import MySQLdb
sys.path.append("/usr/lib/archivematica/archivematicaCommon")
import databaseInterface


def markFileAsDspaceMets(metsFileUUID):
    # Flag the given file's row in the Files table as a DSpace METS file so
    # later micro-services treat it accordingly.
    query = """UPDATE Files SET fileGrpUse = 'DSPACEMETS' WHERE fileUUID = '%s';""" % (MySQLdb.escape_string(metsFileUUID))
    databaseInterface.runSQL(query)


if __name__ == '__main__':
    # argv: [1] UUID of the METS file to flag.
    markFileAsDspaceMets(sys.argv[1])
# @package Archivematica
# @subpackage archivematicaClientScript
# @author Joseph Perry
# @version svn: $Id$
import os
import sys
import lxml.etree as etree
import MySQLdb
sys.path.append("/usr/lib/archivematica/archivematicaCommon")
import databaseInterface

def verifyMetsFileSecChecksums(metsFile, date, taskUUID, transferDirectory, transferUUID, relativeDirectory="./"):
    # NOTE(review): misleading name — no checksum verification happens here.
    # The function marks files listed in fileGrp USE="TEXT" of a DSpace METS
    # file as 'text/ocr' in the Files table.  Returns 0 unconditionally.
    print metsFile
    DspaceLicenses = "metadata/submissionDocumentation/DspaceLicenses"
    # NOTE(review): creating a *DspaceLicenses* directory in a script that
    # identifies *text* files looks like leftover copy-paste from the license
    # variant of this script — the directory is never used below; confirm.
    try:
        path = os.path.join(transferDirectory, DspaceLicenses)
        if not os.path.isdir(path):
            os.mkdir(path)
    except:
        print "error creating DspaceLicenses directory."
    exitCode = 0
    tree = etree.parse(metsFile)
    root = tree.getroot()
    # fileSec/fileGrp traversal; only USE="TEXT" groups matter here.
    for item in root.findall("{http://www.loc.gov/METS/}fileSec/{http://www.loc.gov/METS/}fileGrp"):
        USE = item.get("USE")
        if USE == "TEXT":
            for item2 in item:
                if item2.tag == "{http://www.loc.gov/METS/}file":
                    for item3 in item2:
                        if item3.tag == "{http://www.loc.gov/METS/}FLocat":
                            # href is relative to the METS file's directory.
                            fileLocation = item3.get("{http://www.w3.org/1999/xlink}href")
                            fileFullPath = os.path.join(relativeDirectory, fileLocation)
                            # Files table stores %transferDirectory%-relative paths.
                            dbLocation = fileFullPath.replace(transferDirectory, "%transferDirectory%")
                            sql = """UPDATE Files SET fileGrpUse = 'text/ocr' WHERE currentLocation = '%s' AND transferUUID = '%s';""" % (MySQLdb.escape_string(dbLocation), transferUUID)
                            databaseInterface.runSQL(sql)
    return exitCode


if __name__ == '__main__':
    # argv: [1] METS file, [2] date, [3] task UUID,
    #       [4] transfer directory, [5] transfer UUID
    metsFile = sys.argv[1]
    date = sys.argv[2]
    taskUUID = sys.argv[3]
    transferDirectory = sys.argv[4]
    transferUUID = sys.argv[5]

    ret = verifyMetsFileSecChecksums(metsFile, date, taskUUID, transferDirectory, transferUUID, relativeDirectory=os.path.dirname(metsFile) + "/")
    quit(ret)
/dev/null +++ b/src/MCPClient/lib/clientScripts/identifyFilesByExtension.py @@ -0,0 +1,45 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . + +# @package Archivematica +# @subpackage archivematicaClientScript +# @author Joseph Perry +# @version svn: $Id$ +import sys +import os +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +import databaseInterface +from databaseFunctions import escapeForDB + + +a = """SELECT * FROM FileExtensions JOIN FileIDsByExtension ON FileExtensions.extension = FileIDsByExtension.Extension JOIN FileIDs ON FileIDsByExtension.FileIDs = FileIDs.pk;""" + +def run(target, fileUUID): + basename = os.path.basename(target) + extensionIndex = basename.rfind(".") + if extensionIndex != -1: + extension = basename[extensionIndex+1:] + print "extension:", extension + sql = """INSERT INTO FilesIdentifiedIDs (fileUUID, fileID) VALUES ('%s', (SELECT FileIDs FROM FileIDsByExtension WHERE Extension = '%s'))""" % (escapeForDB(fileUUID), escapeForDB(extension.lower())) + databaseInterface.runSQL(sql) + +if __name__ == '__main__': + target = sys.argv[1] + fileUUID = sys.argv[2] + run(target, fileUUID) \ No newline at end of file diff --git a/src/MCPClient/lib/clientScripts/loadLabelsFromCSV.py b/src/MCPClient/lib/clientScripts/loadLabelsFromCSV.py new file mode 
#!/usr/bin/python -OO

# This file is part of Archivematica.
#
# Copyright 2010-2012 Artefactual Systems Inc.
#
# Archivematica is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Archivematica is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Archivematica. If not, see .

# @package Archivematica
# @subpackage archivematicaClientScript
# @author Joseph Perry
# @version svn: $Id$

import sys
import csv
import os
import MySQLdb
sys.path.append("/usr/lib/archivematica/archivematicaCommon")
import databaseInterface

if __name__ == '__main__':
    # argv: [1] transfer UUID, [2] path to a CSV mapping files to labels.
    transferUUID = sys.argv[1]
    fileLabels = sys.argv[2]
    # Column-order flag: False means rows are (filePath, label);
    # True would mean (label, filePath).  Hard-coded here.
    labelFirst = False

    # A transfer without a labels file is normal — exit successfully.
    if not os.path.isfile(fileLabels):
        print "No such file:", fileLabels
        exit(0)

    with open(fileLabels, 'rb') as f:
        reader = csv.reader(f)
        for row in reader:
            if labelFirst:
                label = row[0]
                filePath = row[1]
            else:
                label = row[1]
                filePath = row[0]
            # CSV paths are relative to the transfer's objects directory;
            # the Files table stores %transferDirectory%-prefixed locations.
            filePath = os.path.join("%transferDirectory%objects/", filePath)
            # NOTE(review): transferUUID is interpolated unescaped — it is
            # supplied by the MCP server, but confirm it is always a UUID.
            sql = """UPDATE Files SET label = '%s' WHERE originalLocation = '%s' AND transferUUID = '%s';""" % (MySQLdb.escape_string(label), MySQLdb.escape_string(filePath), transferUUID)
            databaseInterface.runSQL(sql)
#!/usr/bin/python -OO

# This file is part of Archivematica.
#
# Copyright 2010-2012 Artefactual Systems Inc.
#
# Archivematica is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Archivematica is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Archivematica. If not, see .

# @package Archivematica
# @subpackage archivematicaClientScript
# @author Joseph Perry
# @version svn: $Id$
import os
import sys
import lxml.etree as etree
sys.path.append("/usr/lib/archivematica/archivematicaCommon")
from fileOperations import updateFileLocation
from fileOperations import renameAsSudo

def verifyMetsFileSecChecksums(metsFile, date, taskUUID, transferDirectory, transferUUID, relativeDirectory="./"):
    # NOTE(review): misleading name — no checksum verification here.  Moves
    # every file listed under fileGrp USE="LICENSE" in the DSpace METS into
    # metadata/submissionDocumentation/DspaceLicenses and records the move
    # (location update + movement event) in the database.
    print metsFile
    DspaceLicenses = "metadata/submissionDocumentation/DspaceLicenses"
    try:
        path = os.path.join(transferDirectory, DspaceLicenses)
        if not os.path.isdir(path):
            os.mkdir(path)
    except:
        # Best-effort: reported, but processing continues.  NOTE(review): if
        # mkdir really failed, the renameAsSudo below would also fail.
        print "error creating DspaceLicenses directory."
    exitCode = 0
    tree = etree.parse(metsFile)
    root = tree.getroot()
    for item in root.findall("{http://www.loc.gov/METS/}fileSec/{http://www.loc.gov/METS/}fileGrp"):
        USE = item.get("USE")
        if USE == "LICENSE":
            # fileGrp -> file -> FLocat; xlink:href is relative to the METS
            # file's own directory.
            for item2 in item:
                if item2.tag == "{http://www.loc.gov/METS/}file":
                    for item3 in item2:
                        if item3.tag == "{http://www.loc.gov/METS/}FLocat":
                            fileLocation = item3.get("{http://www.w3.org/1999/xlink}href")
                            fileFullPath = os.path.join(relativeDirectory, fileLocation)
                            # Physical move first (via sudo — the transfer may
                            # not be writable by this process)...
                            dest = os.path.join(transferDirectory, DspaceLicenses, os.path.basename(fileLocation))
                            renameAsSudo(fileFullPath, dest)

                            # ...then record the move against the
                            # %transferDirectory%-relative paths the DB uses.
                            src = fileFullPath.replace(transferDirectory, "%transferDirectory%")
                            dst = dest.replace(transferDirectory, "%transferDirectory%")
                            eventDetail = ""
                            eventOutcomeDetailNote = "moved from=\"" + src + "\"; moved to=\"" + dst + "\""
                            updateFileLocation(src, dst, "movement", date, eventDetail, transferUUID=transferUUID, eventOutcomeDetailNote = eventOutcomeDetailNote)
    return exitCode


if __name__ == '__main__':
    # argv: [1] METS file, [2] date, [3] task UUID,
    #       [4] transfer directory, [5] transfer UUID
    metsFile = sys.argv[1]
    date = sys.argv[2]
    taskUUID = sys.argv[3]
    transferDirectory = sys.argv[4]
    transferUUID = sys.argv[5]

    ret = verifyMetsFileSecChecksums(metsFile, date, taskUUID, transferDirectory, transferUUID, relativeDirectory=os.path.dirname(metsFile) + "/")
    quit(ret)
+# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . + +# @package Archivematica +# @subpackage archivematicaClientScript +# @author Joseph Perry +# @version svn: $Id$ +import os +import sys +import lxml.etree as etree +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +from fileOperations import updateFileLocation +from fileOperations import renameAsSudo + +def verifyMetsFileSecChecksums(metsFile, date, taskUUID, transferDirectory, transferUUID, relativeDirectory="./"): + print metsFile + DSpaceMets = "metadata/submissionDocumentation/DSpaceMets" + try: + path = os.path.join(transferDirectory, DSpaceMets) + if not os.path.isdir(path): + os.mkdir(path) + except: + print "error creating DSpaceMets directory." 
+ exitCode = 0 + + metsDirectory = os.path.basename(os.path.dirname(metsFile)) + + if metsDirectory == "DSpace_export": + outputDirectory = path + else: + outputDirectory = os.path.join(path, metsDirectory) + if not os.path.isdir(outputDirectory): + os.mkdir(outputDirectory) + + dest = os.path.join(outputDirectory, "mets.xml") + renameAsSudo(metsFile, dest) + + src = metsFile.replace(transferDirectory, "%transferDirectory%") + dst = dest.replace(transferDirectory, "%transferDirectory%") + eventDetail = "" + eventOutcomeDetailNote = "moved from=\"" + src + "\"; moved to=\"" + dst + "\"" + updateFileLocation(src, dst, "movement", date, eventDetail, transferUUID=transferUUID, eventOutcomeDetailNote = eventOutcomeDetailNote) + + return exitCode + + + +if __name__ == '__main__': + metsFile = sys.argv[1] + date = sys.argv[2] + taskUUID = sys.argv[3] + transferDirectory = sys.argv[4] + transferUUID = sys.argv[5] + + + ret = verifyMetsFileSecChecksums(metsFile, date, taskUUID, transferDirectory, transferUUID, relativeDirectory=os.path.dirname(metsFile) + "/") + quit(ret) diff --git a/src/MCPClient/lib/clientScripts/quarantineSIP.sh b/src/MCPClient/lib/clientScripts/quarantineSIP.sh new file mode 100755 index 0000000000..a859c9e855 --- /dev/null +++ b/src/MCPClient/lib/clientScripts/quarantineSIP.sh @@ -0,0 +1,27 @@ +#!/bin/bash + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
#
# You should have received a copy of the GNU General Public License
# along with Archivematica. If not, see .

# @package Archivematica
# @subpackage archivematicaClientScript
# @author Austin Trask
# @version svn: $Id$

# Usage: quarantineSIP.sh <mode> <path>
#   $1 - permission mode passed to chmod (e.g. 700)
#   $2 - path of the SIP/transfer directory to quarantine

# Take ownership of the whole tree so the permission change below succeeds.
# NOTE(review): relies on passwordless sudo for the archivematica user.
sudo chown -R archivematica:archivematica "$2"
# Refresh the mtime so the quarantine period is measured from now.
touch "$2"
# Lock the tree down to the requested mode for the quarantine period.
chmod -R "$1" "$2"
# @package Archivematica
# @subpackage archivematicaClientScript
# @author Joseph Perry
# @version svn: $Id$

import os
import sys

# Process-wide error counter; becomes the script's exit status.
exitCode = 0


def removeEmptyDirectories(path):
    """Recursively delete empty directories beneath (and including) path.

    Returns True if path contained nothing but (removed) empty directories
    and was itself removed, False otherwise.  Errors are reported on stderr
    and counted in the module-level exitCode.
    """
    # Bug fix: the original incremented the module-level exitCode without a
    # 'global' declaration, which made the error path raise
    # UnboundLocalError instead of counting the failure.
    global exitCode
    if not os.path.isdir(path):
        sys.stdout.write("Not a directory:  %s\n" % path)
        exit(1)
    empty = True
    for leaf in os.listdir(path):
        fullPath = os.path.join(path, leaf)
        try:
            if os.path.isdir(fullPath):
                # A non-empty subdirectory makes this directory non-empty too.
                if not removeEmptyDirectories(fullPath):
                    empty = False
            else:
                empty = False
        except OSError:
            sys.stderr.write("Error with path: %s\n" % fullPath)
            exitCode += 1
    if empty:
        try:
            os.rmdir(path)
            sys.stdout.write("removing empty directory: %s\n" % path)
        except OSError:
            # Mirrors the original behaviour: the failure is reported but
            # not counted in exitCode.
            sys.stderr.write("Error removing: %s\n" % path)
    return empty


if __name__ == '__main__':
    # argv: [1] root directory to prune.
    path = sys.argv[1]
    removeEmptyDirectories(path)
    exit(exitCode)
# @package Archivematica
# @subpackage archivematicaClientScript
# @author Joseph Perry
# @version svn: $Id$

import sys
import os
from optparse import OptionParser


def verifyFileUUID(fileUUID, filePath, sipDirectory):
    """Remove filePath when no UUID was assigned to it.

    A fileUUID equal to the literal string "None" means the file has no
    premis metadata; the file is deleted, its SIP-relative path is reported
    on stderr, and the process exits successfully.  Otherwise nothing
    happens.
    """
    if fileUUID != "None":
        return
    relativeFilePath = filePath.replace(sipDirectory, "%SIPDirectory%", 1)
    sys.stderr.write(relativeFilePath + "\n")
    os.remove(filePath)
    quit(0)


if __name__ == '__main__':
    parser = OptionParser()
    parser.add_option("-f", "--inputFile", action="store", dest="inputFile", default="")
    parser.add_option("-o", "--sipDirectory", action="store", dest="sipDirectory", default="")
    parser.add_option("-i", "--fileUUID", action="store", dest="fileUUID", default="")

    (opts, args) = parser.parse_args()

    verifyFileUUID(opts.fileUUID, opts.inputFile, opts.sipDirectory)
# @package Archivematica
# @subpackage archivematicaClientScript
# @author Joseph Perry
# @version svn: $Id$

import sys
import os
import shutil


def removeHiddenFilesFromDirectory(dir):
    """Recursively purge hidden entries from dir.

    Dot-prefixed directories are deleted whole; dot-prefixed files and
    editor backups (trailing '~') are deleted individually.  Anything that
    is neither a regular file nor a directory is reported on stderr.
    """
    for entry in os.listdir(dir):
        entryPath = os.path.join(dir, entry)
        if os.path.isdir(entryPath):
            if not entry.startswith("."):
                # Visible directory: recurse into it.
                removeHiddenFilesFromDirectory(entryPath)
                continue
            sys.stdout.write("Removing directory:  %s\n" % entryPath)
            shutil.rmtree(entryPath)
        elif os.path.isfile(entryPath):
            if entry.startswith(".") or entry.endswith("~"):
                sys.stdout.write("Removing file:  %s\n" % entryPath)
                os.remove(entryPath)
        else:
            # e.g. a broken symlink or special file.
            sys.stderr.write("Not file or directory:  %s\n" % entryPath)


if __name__ == '__main__':
    # argv: [1] transfer directory to clean.
    transferDirectory = sys.argv[1]
    removeHiddenFilesFromDirectory(transferDirectory)
+ +# @package Archivematica +# @subpackage archivematicaClientScript +# @author Joseph Perry +# @version svn: $Id$ + +import sys +import os +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +from fileOperations import removeFileByFileUUID +removeIfFileNameIs = ["Thumbs.db", "Icon", u"Icon\u000D"] + +def removableFile(target): + global eventDetailText + basename = os.path.basename(target) + if basename in removeIfFileNameIs: + eventDetailText = basename + " is noted as a removable file." + return True + return False + +if __name__ == '__main__': + target = sys.argv[1] + fileUUID = sys.argv[2] + logsDirectory = sys.argv[3] + date = sys.argv[4] + eIDValue = sys.argv[5] + + global eventDetailText + eventDetailText = "fileRemoved" + if removableFile(target): + print fileUUID + " -> " + os.path.basename(target) + os.remove(target) + removeFileByFileUUID(fileUUID) diff --git a/src/MCPClient/lib/clientScripts/removeWithAsterisk.sh b/src/MCPClient/lib/clientScripts/removeWithAsterisk.sh new file mode 100755 index 0000000000..6656ec442d --- /dev/null +++ b/src/MCPClient/lib/clientScripts/removeWithAsterisk.sh @@ -0,0 +1,28 @@ +#!/bin/bash + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . 
# @package Archivematica
# @subpackage archivematicaClientScript
# @author Joseph Perry
# @version svn: $Id$
#source /etc/archivematica/archivematicaConfig.conf

# removeWithAsterisk.sh: remove every path matching the glob <prefix>*<suffix>.
# $1 - path prefix, $2 - suffix.  The unquoted '*' between the two quoted
# arguments is deliberate — it must be expanded by the shell.
rm -R "$1"*"$2"
exit $?

#!/bin/bash

# This file is part of Archivematica.
#
# Copyright 2010-2012 Artefactual Systems Inc.
#
# Archivematica is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Archivematica is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Archivematica. If not, see .

# @package Archivematica
# @subpackage archivematicaClientScript
# @author Joseph Perry
# @version svn: $Id$

# restructureBAG.sh: rearrange an unpacked BagIt bag into the standard
# Archivematica SIP layout.
#   $1 - bag directory (BAG, expected to end with '/')
#   $2 - acquisition date/time to record

set -e
BAG="$1"
DATE="$2"
DIRNAME="`dirname "$BAG"`"
# Temporary sibling directory with a unique name so we can shuffle the bag's
# contents without colliding with anything inside it.
tmpUUID="`uuid`"
# 1. The bag payload (data/) becomes the new SIP root; the bagit control
#    files (bagit.txt, manifests, ...) are preserved under metadata/bagit.
mv "${BAG}data" "${DIRNAME}/${tmpUUID}"
mkdir -p "${DIRNAME}/${tmpUUID}/metadata/bagit"
mv "${BAG}"* "${DIRNAME}/${tmpUUID}/metadata/bagit/."
rm -r "${BAG}"
mv "${DIRNAME}/${tmpUUID}" "${BAG}"
# 2. If the payload did not already contain an objects/ directory, push the
#    whole current content down into a fresh objects/ directory.
if [ ! -d "${BAG}objects" ]; then
    mkdir "${DIRNAME}/${tmpUUID}"
    mkdir "${DIRNAME}/${tmpUUID}/objects"
    mv "${BAG}"* "${DIRNAME}/${tmpUUID}/."
    rm -r "${BAG}"
    mv "${DIRNAME}/${tmpUUID}" "${BAG}"
fi
# 3. Ensure the standard SIP skeleton exists.
if [ ! -d "${BAG}logs" ]; then
    mkdir "${BAG}logs"
fi
if [ ! -d "${BAG}logs/fileMeta" ]; then
    mkdir "${BAG}logs/fileMeta"
fi

if [ ! -d "${BAG}metadata" ]; then
    mkdir "${BAG}metadata"
fi
if [ ! -d "${BAG}metadata/submissionDocumentation" ]; then
    mkdir "${BAG}metadata/submissionDocumentation"
fi
# 4. Record when the SIP was acquired (only once).
if [ ! -e "${BAG}logs/acquiredSIPDateTime.log" ]; then
    echo ${DATE} > "${BAG}logs/acquiredSIPDateTime.log"
fi
def prepareOutputDir(outputDipDir, importMethod, dipUuid):
    """Create (or recreate) the output directory for the CONTENTdm DIP.

    importMethod is either 'projectclient' or 'directupload'.  Any existing
    directory for this package is deleted first so the import package is
    always built from a clean slate.  Returns the created path.
    """
    outputDipDir = os.path.join(outputDipDir, importMethod, dipUuid)
    if os.path.exists(outputDipDir):
        shutil.rmtree(outputDipDir)
    os.makedirs(outputDipDir)
    return outputDipDir


# Takes in a DOM object containing the Dublin Core XML, returns a dictionary with
# tag : [ value1, value2 ] members. Also, since minidom only handles byte strings
# we need to encode strings before passing them to minidom functions. label is
# an optional argument for use with compound item children, which may not have a
# dublincore object.
def parseDcXml(dublincore, label = '[Placeholder title]'):
    # No DC metadata at all: fall back to the (possibly caller-supplied) label.
    if dublincore is None:
        return {'title' : [label]}
    # Not a DOM object (e.g. an empty/placeholder value): same fallback shape.
    if not hasattr(dublincore, 'getElementsByTagName'):
        return {'title' : [label]}

    # CONTENTdm requires a title; if the DC metadata has none, return a
    # placeholder.  Bug fix: every other branch of this function returns the
    # title as a *list* of values, but this branch returned a bare string,
    # which made callers iterating the title values iterate its characters.
    if hasattr(dublincore, 'getElementsByTagName'):
        dcTitlesDom = dublincore.getElementsByTagName('title')
        if not dcTitlesDom:
            return {'title' : ['[Placeholder title]']}

    # Collect every non-empty element, accumulating repeated elements into a
    # list per tag name.
    dcElementsDom = dublincore.getElementsByTagName('*')
    dcElementsDict = {}
    for dcElement in dcElementsDom:
        # We only want elements that are not empty.
        if dcElement.firstChild:
            # NOTE(review): membership is tested on the unencoded tagName but
            # keys are stored utf-8 encoded — equivalent under Python 2, but a
            # str/bytes mismatch under Python 3; confirm before porting.
            if dcElement.tagName not in dcElementsDict:
                dcElementsDict[dcElement.tagName.encode("utf-8")] = [dcElement.firstChild.nodeValue.encode("utf-8")]
            else:
                dcElementsDict[dcElement.tagName.encode("utf-8")].append(dcElement.firstChild.nodeValue.encode("utf-8"))
    return dcElementsDict


# Takes in a DOM object containing the METS structMap, returns a dictionary with
# fptrValue : [ order, parent, dmdSec, label, filename ] members.
# Files in the DIP objects directory start with the UUID (i.e., first 36 characters
# of the filename) of the file named in the fptr FILEID in the structMap; each file
# ends in the UUID. Also, we are only interested in divs that are direct children
# of a div with TYPE=directory and LABEL=objects.
+# NOTE(review): the example structMap XML that illustrated the comment above
+# was lost when angle-bracket markup was stripped from this hunk; it showed a
+# <div TYPE="directory" LABEL="objects"> wrapper whose child divs carry LABEL
+# attributes and <fptr FILEID="..."/> elements. Recover it from the original.
def parseStructMap(structMap, filesInObjectDirectory):
    # Map each fptr FILEID in the structMap to its order, parent, dmdSec ID,
    # label and matching filename from the DIP objects directory.
    structMapDict = {}
    # filesInObjectDirectory contains full paths; we only need the filenames
    # for the structMap matching below.
    filesInObjectDir = []
    for objectFilePath in filesInObjectDirectory:
        head, tail = os.path.split(objectFilePath)
        filesInObjectDir.append(tail)

    # Walk all the fptr elements, numbering them in document order.
    fptrOrder = 0
    for node in structMap.getElementsByTagName('fptr'):
        for k, v in node.attributes.items():
            if k == 'FILEID':
                # parentDivDmdId is a placeholder for when we support compound
                # items with their own descriptive metadata.
                parentDivDmdId = node.parentNode.getAttribute('DMDID')
                filename = getFptrObjectFilename(v, filesInObjectDir)
                # We only want entries for files that are in the objects directory.
                if filename is not None:
                    parentDivLabel = node.parentNode.getAttribute('LABEL')
                    # If the parent div doesn't have a LABEL, use the filename.
                    if not len(parentDivLabel):
                        parentDivLabel = filename
                    fptrOrder = fptrOrder + 1
                    structMapDict[v] = {
                        # Python has no natsort, so pad fptrOrder with zeros to
                        # 5 digits to make it lexicographically sortable.
                        'order' : str(fptrOrder).zfill(5),
                        'parent' : '', # Placeholder for hierarchical items.
                        'filename' : filename,
                        'label' : parentDivLabel,
                        'dmdSec' : parentDivDmdId
                    }

    return structMapDict


# Given a fptr value (which looks like
# P1050154.JPG-09869659-fc89-46ce-ad1c-fe166becccca), return the name of the
# corresponding file from the DIP objects directory, or None if no file in the
# objects directory contains the fptr value's UUID.
def getFptrObjectFilename(fptrValue, filesInObjectDir):
    # Assumes the UUID is the last 36 characters of the fptr value.
    uuid = fptrValue[-36:]
    for filename in filesInObjectDir:
        if uuid in filename:
            return filename
    return None


# Generate a dictionary containing 1) 'mappings', a nested dictionary with
# DCTERMS elements as keys, each of which has as its value the CONTENTdm nick
# and name for the corresponding field in the current collection, and
# 2) 'order', a list of the collection's field nicks, which is needed to write
# out the metadata in the correct field order. The Archivematica metadata CRUD
# form only uses the legacy unqualified DC elements but we include the entire
# CONTENTdm DCTERMS mappings because the full set of DCTERMS is supported in
# dublincore.xml files included in the transfer packages' metadata directory.
def getContentdmCollectionFieldInfo(contentdmServer, targetCollection):
    collectionFieldInfo = {}
    # The CONTENTdm DC nicknames -> DCTERMS mapping.
    contentdmDctermsMap = {
        'describ' : 'abstract',
        'rightsa' : 'accessRights',
        'accrua' : 'accrualMethod',
        'accrub' : 'accrualPeriodicity',
        'accruc' : 'accrualPolicy',
        'titlea' : 'alternative',
        'audien' : 'audience',
        'datec' : 'available',
        'identia' : 'bibliographicCitation',
        'relatim' : 'conformsTo',
        'contri' : 'contributor',
        'covera' : 'coverage',
        'datea' : 'created',
        'creato' : 'creator',
        'date' : 'date',
        'datef' : 'dateAccepted',
        'dateg' : 'dateCopyrighted',
        'dateh' : 'dateSubmitted',
        'descri' : 'description',
        'audienb' : 'educationLevel',
        'formata' : 'extent',
        'format' : 'format',
        'relatil' : 'hasFormat',
        'relatih' : 'hasPart',
        'relatib' : 'hasVersion',
        'identi' : 'identifier',
        'instru' : 'instructionalMethod',
        'relatik' : 'isFormatOf',
        'relatig' : 'isPartOf',
        'relatii' : 'isReferencedBy',
        'relatic' : 'isReplacedBy',
        'relatie' : 'isRequiredBy',
        'relatia' : 'isVersionOf',
        'dated' : 'issued',
        'langua' : 'language',
        'rightsb' : 'license',
        'audiena' : 'mediator',
        'formatb' : 'medium',
        'datee' : 'modified',
        'proven' : 'provenance',
        'publis' : 'publisher',
        'relatij' : 'references',
        'relati' : 'relation',
        'relatid' : 'replaces',
        'relatif' : 'requires',
        'rights' : 'rights',
        'rightsc' : 'rightsHolder',
        'source' : 'source',
        'coveraa' : 'spatial',
        'subjec' : 'subject',
        'descria' : 'tableOfContents',
        'coverab' : 'temporal',
        'title' : 'title',
        'type' : 'type',
        'dateb' : 'valid',
    }
    # Query CONTENTdm to get the target collection's field configuration.
    CollectionFieldConfigUrl = 'http://' + contentdmServer + '/dmwebservices/index.php?q=dmGetCollectionFieldInfo' + targetCollection + '/json'
    try:
        f = urllib.urlopen(CollectionFieldConfigUrl)
        collectionFieldConfigString = f.read()
        collectionFieldConfig = json.loads(collectionFieldConfigString)
    except Exception:
        print("Cannot retrieve CONTENTdm collection field configuration from " + CollectionFieldConfigUrl)
        sys.exit(1)

    # We want a dict of items that looks like
    # { 'contributor': { 'name': u'Contributors', 'nick': u'contri'},
    #   'creator': { 'name': u'Creator', 'nick': u'creato'}, [...] }.
    # It is possible that more than one CONTENTdm field is mapped to the same
    # DC element; in that case just take the last mapping, since there is no
    # way to tell which should take precedence.
    collectionFieldMappings = {}
    # We also want a simple list of all the fields in the current collection.
    collectionFieldOrder = []
    for fieldConfig in collectionFieldConfig:
        # BUG FIX: the original looped over every key of fieldConfig here
        # (for k, v in fieldConfig.iteritems()), which appended each field's
        # nick once PER KEY, duplicating every entry in the order list and
        # re-writing the same mapping repeatedly. Each field is now processed
        # exactly once.
        if fieldConfig['dc'] != 'BLANK' and fieldConfig['dc'] != '':
            collectionFieldMappings[contentdmDctermsMap[fieldConfig['dc']]] = {'nick' : fieldConfig['nick'], 'name' : fieldConfig['name']}
        collectionFieldOrder.append(fieldConfig['nick'])
    collectionFieldInfo['mappings'] = collectionFieldMappings
    collectionFieldInfo['order'] = collectionFieldOrder
    return collectionFieldInfo


# Return the dmdSec with the specified ID value. If dublinCore is True, return
# the dublincore child node only.
def getDmdSec(metsDom, dmdSecId = 'dmdSec_1', dublinCore = True):
    for node in metsDom.getElementsByTagName('dmdSec'):
        for k, v in node.attributes.items():
            # NOTE(review): this returns on the FIRST attribute of the first
            # dmdSec -- if that attribute is not a matching ID, the whole
            # dmdSec node is returned regardless of dmdSecId. Preserved as-is
            # from the recovered source; confirm the intended behaviour.
            if dublinCore and k == 'ID' and v == dmdSecId:
                # Assumes there is only one dublincore child element.
                return node.getElementsByTagName('dublincore')[0]
            else:
                return node


# Get a list of all the files (recursive) in the DIP objects directory. Even
# though there can be subdirectories in the objects directory, assumes each
# file has a unique name.
def getObjectDirectoryFiles(objectDir):
    fileList = []
    for root, subFolders, files in os.walk(objectDir):
        for name in files:
            fileList.append(os.path.join(root, name))
    return fileList


# Create a .zip from the DIP files produced by the generateXXProjectClientPackage
# functions. Zip files are written in the uploadedDIPs directory.
def zipProjectClientOutput(outputDipDir, dipUuid, type):
    outputFile = zipfile.ZipFile(outputDipDir + ".zip", "w")
    # For each file in the DIP output root directory, prepend the DIP UUID to
    # the archive name so the zip unpacks into the corresponding directory.
    sourceFilesRoot = glob.glob(os.path.join(outputDipDir, '*'))
    for rootSourceFilename in sourceFilesRoot:
        destFilename = os.path.join(dipUuid, os.path.basename(rootSourceFilename))
        outputFile.write(rootSourceFilename, destFilename, zipfile.ZIP_DEFLATED)

    # BUG FIX: was "if type is 'compound'" -- identity comparison on a string
    # is implementation-dependent; use equality.
    if type == 'compound':
        sourceFilesScans = glob.glob(os.path.join(outputDipDir, 'scans', '*'))
        if not len(sourceFilesScans):
            print("No DIP files found.")
            sys.exit(1)
        # Prepend <dipUuid>/scans to each scan file's archive name.
        for scansSourceFilename in sourceFilesScans:
            destFilename = os.path.join(dipUuid, 'scans', os.path.basename(scansSourceFilename))
            outputFile.write(scansSourceFilename, destFilename, zipfile.ZIP_DEFLATED)

    outputFile.close()


# Generate a .desc (item metadata) file used in CONTENTdm 'direct import'
# packages: one element per collection field (empty or populated), followed by
# a block of boilerplate administrative fields.
def generateDescFile(dcMetadata):
    collectionFieldInfo = getContentdmCollectionFieldInfo(args.contentdmServer, args.targetCollection)
    # NOTE(review): the XML literals in this function were garbled in the
    # recovered source (angle-bracket content stripped); the tag names below
    # are a reconstruction of the standard CONTENTdm .desc layout -- confirm
    # against the original file before relying on exact output.
    output = '<?xml version="1.0" encoding="utf-8"?>' + "\n"
    output += "<itemmetadata>\n"

    # Loop through the collection's field configuration and generate XML
    # elements for all of its fields.
    for dcElement in collectionFieldInfo['mappings'].keys():
        # If a field is in the incoming item dcMetadata, populate the
        # corresponding tag with its value(s).
        if dcElement in dcMetadata.keys():
            values = ''
            output += '<' + dcElement + '>'
            # Repeated values in CONTENTdm metadata are separated with semicolons.
            for value in dcMetadata[dcElement]:
                values += value + '; '
            output += values.rstrip('; ')
            output += '</' + dcElement + '>' + "\n"
        # Fields in the collection config with no value for this item are
        # emitted as empty elements.
        else:
            output += '<' + dcElement + '></' + dcElement + '>' + "\n"

    # These fields are boilerplate in new .desc files.
    output += "<is></is>\n"
    output += "<transc></transc>\n"
    output += "<fullrs></fullrs>\n"
    output += "<dmoclcno></dmoclcno>\n"
    output += "<dmcreated></dmcreated>\n"
    output += "<dmmodified></dmmodified>\n"
    output += "<dmrecord></dmrecord>\n"
    output += "<find></find>\n"
    output += "<dmimage></dmimage>\n"
    output += "<dmad1></dmad1>\n"
    output += "<dmad2></dmad2>\n"
    output += "<dmaccess></dmaccess>\n"
    output += "</itemmetadata>\n"
    return output


# Return a DOM object containing a skeletal Dublin Core XML structure with only
# a populated <title> element. Used for generating .desc files for compound
# item children, which have no dublincore object of their own.
def generateCompoundItemChildDmdSec(label):
    dublinCore = '<dublincore>'
    dublinCore += '<title>' + label + '</title>'
    # NOTE(review): the empty-element literals below were stripped of their
    # markup in the recovered source; reconstructed as the 14 remaining
    # unqualified DC elements -- confirm against the original.
    dublinCore += """<creator></creator>
<subject></subject>
<description></description>
<publisher></publisher>
<contributor></contributor>
<date></date>
<type></type>
<format></format>
<identifier></identifier>
<source></source>
<language></language>
<relation></relation>
<coverage></coverage>
<rights></rights>
</dublincore>"""
    dublinCoreDom = parseString(dublinCore.encode('utf-8'))
    return dublinCoreDom


# Generate an object file's entry in the .full (manifest) file.
def generateFullFileEntry(title, filename, extension):
    # NOTE(review): the tag names here were garbled in the recovered source
    # and are reconstructed -- confirm against the original.
    fullFileContent = "<item>\n"
    fullFileContent += "  <title>" + title + "</title>\n"
    fullFileContent += "  <object>" + filename + extension + "</object>\n"
    fullFileContent += "  <desc>" + filename + ".desc</desc>\n"
    fullFileContent += "  <icon>" + filename + ".icon</icon>\n"
    fullFileContent += "  <update>0</update>\n  <pagefile>nopdf</pagefile>\n"
    fullFileContent += "</item>\n"
    return fullFileContent


# Generate a 'direct upload' package for a simple item from the Archivematica
# DIP. The package contains the object file, its thumbnail, a .desc (DC
# metadata) file, and a .full (manifest) file.
def generateSimpleContentDMDirectUploadPackage(metsDom, dipUuid, outputDipDir, filesInObjectDirectory, filesInThumbnailDirectory):
    outputDipDir = prepareOutputDir(outputDipDir, 'directupload', dipUuid)
    dmdSec = getDmdSec(metsDom)
    dcMetadata = parseDcXml(dmdSec)
    descFileContents = generateDescFile(dcMetadata)
    # Write the .desc file into the output directory.
    descFile = open(os.path.join(outputDipDir, dipUuid + '.desc'), "wb")
    descFile.write(descFileContents)
    descFile.close()

    # Copy the thumbnail into the output directory. There will only be one,
    # and it must end in .icon.
    shutil.copy(filesInThumbnailDirectory[0], os.path.join(outputDipDir, dipUuid + '.icon'))

    # Copy the object file (there will only be one) into the output directory,
    # named after the package with the extension of the source file.
    objectFileFilename, objectFileFileExtension = os.path.splitext(filesInObjectDirectory[0])
    shutil.copy(filesInObjectDirectory[0], os.path.join(outputDipDir, dipUuid + objectFileFileExtension))

    fullFileContents = generateFullFileEntry(dipUuid + objectFileFileExtension, dipUuid, objectFileFileExtension)
    fullFile = open(os.path.join(outputDipDir, dipUuid + '.full'), "wb")
    fullFile.write(fullFileContents)
    fullFile.close()


# Generate a 'project client' package for a simple item from the Archivematica
# DIP: the object file plus a tab-delimited metadata file suitable for
# importing into CONTENTdm with its Project Client.
def generateSimpleContentDMProjectClientPackage(metsDom, dipUuid, outputDipDir, filesInObjectDirectory):
    dmdSec = getDmdSec(metsDom)
    dcMetadata = parseDcXml(dmdSec)
    outputDipDir = prepareOutputDir(outputDipDir, 'projectclient', dipUuid)

    # Copy each object file into the output directory.
    for file in filesInObjectDirectory:
        shutil.copy(file, outputDipDir)

    # Write a tab-delimited file: first row field labels, second row values,
    # both in the order expressed in collectionFieldInfo['order']. For each
    # nick in 'order', find the matching mapping; if found, write the value of
    # the dmdSec element matching the mapping's key, else write ''.
    collectionFieldInfo = getContentdmCollectionFieldInfo(args.contentdmServer, args.targetCollection)
    delimHeaderRow = []
    delimValuesRow = []
    for field in collectionFieldInfo['order']:
        for k, v in collectionFieldInfo['mappings'].items():
            if field == v['nick']:
                # Append the field name to the header row.
                delimHeaderRow.append(v['name'])
                # Append the element value to the values row.
                if k in dcMetadata:
                    # In CONTENTdm, repeated values are joined with a semicolon;
                    # rows can't contain new lines.
                    joinedDcMetadataValues = '; '.join(dcMetadata[k])
                    joinedDcMetadataValues = joinedDcMetadataValues.replace("\r", "")
                    joinedDcMetadataValues = joinedDcMetadataValues.replace("\n", "")
                    delimValuesRow.append(joinedDcMetadataValues)
                # Append a placeholder to keep the row intact.
                else:
                    delimValuesRow.append('')

    delimitedFile = open(os.path.join(outputDipDir, 'simple.txt'), "wb")
    writer = csv.writer(delimitedFile, delimiter='\t')
    delimHeaderRow.append('Filename') # Must contain 'Filename' in last position.
    writer.writerow(delimHeaderRow)
    # NOTE(review): 'file' here is the leaked variable from the copy loop
    # above, i.e. the LAST object file -- preserved from the recovered source.
    head, tail = os.path.split(file)
    delimValuesRow.append(tail) # Must contain the filename in last position.
    writer.writerow(delimValuesRow)
    delimitedFile.close()

    zipProjectClientOutput(outputDipDir, dipUuid, 'simple')
    # Delete the unzipped version of the DIP since we don't use it anyway.
    shutil.rmtree(outputDipDir)

# Generate a 'direct upload' package for a compound item from the Archivematica
# DIP. Consults the structMap and writes out a corresponding structure (.cpd)
# file. Also, for every file: copy the file, create an .icon and a .desc, plus
# create index.desc, index.cpd, index.full, and ready.txt.
# @todo: If a user-submitted structMap is present, use it to order the files.
def generateCompoundContentDMDirectUploadPackage(metsDom, dipUuid, outputDipDir, filesInObjectDirectory, filesInThumbnailDirectory):
    outputDipDir = prepareOutputDir(outputDipDir, 'directupload', dipUuid)
    dmdSec = getDmdSec(metsDom)
    dcMetadata = parseDcXml(dmdSec)
    descFileContents = generateDescFile(dcMetadata)
    # Output a .desc file for the parent item (index.desc).
    descFile = open(os.path.join(outputDipDir, 'index.desc'), "wb")
    descFile.write(descFileContents)
    descFile.close()

    # Start to build the index.cpd file.
    # @todo: will be 'Monograph' for hierarchical items.
    # NOTE(review): tag literals reconstructed (markup was stripped from the
    # recovered source; the closing '</cpd>' below survived intact).
    cpdFileContent = "<cpd>\n  <type>Document</type>\n"

    # Populate the .full entry for the parent item, joining repeated titles
    # with semicolons.
    titleValues = ''
    for titleValue in dcMetadata['title']:
        titleValues += titleValue + '; '
    titleValues = titleValues.rstrip('; ')
    fullFileContents = generateFullFileEntry(titleValues, 'index', '.cpd')

    # Archivematica's structMap is always the first one; the user-submitted
    # structMap is always the second one. @todo: if the user-submitted
    # structMap is present, parse it for the SIP structure.
    structMapDom = metsDom.getElementsByTagName('structMap')[0]
    structMapDict = parseStructMap(structMapDom, filesInObjectDirectory)

    # Determine the order in which child-level rows are added to the .cpd and
    # .full files.
    Orders = []
    for fptr, details in structMapDict.items():
        Orders.append(details['order'])

    # Iterate through the sorted order values and process the matching
    # structMapDict entries.
    for order in sorted(Orders):
        for k, v in structMapDict.items():
            # Base filename without extension, used for the .icon and .desc files.
            accessFileBasenameName, accessFileBasenameExt = os.path.splitext(v['filename'])

            # Remember the first file in sort order; its icon becomes the
            # parent item's thumbnail below.
            # NOTE(review): if no entry has order '00001', the later
            # parentThumbnailFilename reference would raise -- preserved as-is.
            if v['order'] == '00001':
                parentThumbnailFilename = accessFileBasenameName + '.icon'

            if order == v['order']:
                # Copy the object file whose full path matches v['filename'].
                for fullPath in filesInObjectDirectory:
                    if (v['filename'] in fullPath):
                        shutil.copy(fullPath, outputDipDir)

                # Copy the matching thumbnail, renamed to <basename>.icon.
                for thumbnailFilePath in filesInThumbnailDirectory:
                    thumbnailBasename = os.path.basename(thumbnailFilePath)
                    # Strip the extension so we can match on the name.
                    thumbnailBasenameName, thumbnailBasenameext = os.path.splitext(thumbnailBasename)
                    if (thumbnailBasenameName in v['filename']):
                        thumbnailFilename = accessFileBasenameName + '.icon'
                        shutil.copy(thumbnailFilePath, os.path.join(outputDipDir, thumbnailFilename))

                # Output a .desc file for each object file. Object files have
                # no child-level descriptions yet, so the structMap label
                # stands in for the title (see the CONTENTdm integration wiki).
                dcMetadata = parseDcXml(None, v['label'])
                descFileContents = generateDescFile(dcMetadata)
                descFilename = accessFileBasenameName + '.desc'
                descFile = open(os.path.join(outputDipDir, descFilename), "wb")
                descFile.write(descFileContents)
                descFile.close()

                # Add the object file's .full entry (children have empty titles).
                fullFileContents += generateFullFileEntry('', accessFileBasenameName, accessFileBasenameExt)
                # Add the object file's .cpd entry.
                # @todo: account for hierarchical items here.
                cpdFileContent += "  <page>\n"
                cpdFileContent += "    <pagetitle>" + v['label'] + "</pagetitle>\n"
                cpdFileContent += "    <pagefile>" + v['filename'] + "</pagefile>\n"
                cpdFileContent += "    <pageptr>+</pageptr>\n"
                cpdFileContent += "  </page>\n"

    # Write out the index.full file.
    fullFile = open(os.path.join(outputDipDir, 'index.full'), "wb")
    fullFile.write(fullFileContents)
    fullFile.close()

    # Write out the index.cpd file.
    cpdFileContent += '</cpd>'
    indexCpdFile = open(os.path.join(outputDipDir, 'index.cpd'), "wb")
    indexCpdFile.write(cpdFileContent)
    indexCpdFile.close()

    # Create a thumbnail for the parent item (index.icon) from the first item.
    shutil.copy(os.path.join(outputDipDir, parentThumbnailFilename), os.path.join(outputDipDir, 'index.icon'))

    # Write out the ready.txt file, which contains the string '1'.
    readyFile = open(os.path.join(outputDipDir, 'ready.txt'), "wb")
    readyFile.write('1')
    readyFile.close()


# Generate a 'project client' package for a compound CONTENTdm item from the
# Archivematica DIP: the object files plus a tab-delimited metadata file
# suitable for importing into CONTENTdm with its Project Client.
def generateCompoundContentDMProjectClientPackage(metsDom, dipUuid, outputDipDir, filesInObjectDirectory):
    dmdSec = getDmdSec(metsDom)
    dcMetadata = parseDcXml(dmdSec)

    # Archivematica's structMap is always the first one; the user-submitted
    # structMap is always the second one. @todo: if the user-submitted
    # structMap is present, parse it for the SIP structure.
    structMapDom = metsDom.getElementsByTagName('structMap')[0]
    structMapDict = parseStructMap(structMapDom, filesInObjectDirectory)

    outputDipDir = prepareOutputDir(outputDipDir, 'projectclient', dipUuid)

    # Create a 'scans' subdirectory in the output directory.
    scansDir = os.path.join(outputDipDir, 'scans')
    os.makedirs(scansDir)

    # Build the item-level header and values rows, in the order expressed in
    # collectionFieldInfo['order'] (same logic as the simple variant above).
    collectionFieldInfo = getContentdmCollectionFieldInfo(args.contentdmServer, args.targetCollection)
    delimHeaderRow = []
    delimItemValuesRow = []
    for field in collectionFieldInfo['order']:
        for k, v in collectionFieldInfo['mappings'].items():
            if field == v['nick']:
                # Append the field name to the header row.
                delimHeaderRow.append(v['name'])
                # Append the element value to the values row.
                if k in dcMetadata:
                    # Semicolon-join repeated values; rows can't contain newlines.
                    joinedDcMetadataValues = '; '.join(dcMetadata[k])
                    joinedDcMetadataValues = joinedDcMetadataValues.replace("\r", "")
                    joinedDcMetadataValues = joinedDcMetadataValues.replace("\n", "")
                    delimItemValuesRow.append(joinedDcMetadataValues)
                # Append a placeholder to keep the row intact.
                else:
                    delimItemValuesRow.append('')

    delimitedFile = open(os.path.join(outputDipDir, 'compound.txt'), "wb")
    writer = csv.writer(delimitedFile, delimiter='\t')
    # Write the header row.
    delimHeaderRow.append('Filename') # Must contain 'Filename' in last position.
    writer.writerow(delimHeaderRow)
    # Write the item-level metadata row.
    writer.writerow(delimItemValuesRow)

    # Determine the order in which child-level rows are added.
    Orders = []
    for fptr, details in structMapDict.items():
        Orders.append(details['order'])

    # Iterate through the sorted order values, copy each matching file into
    # the scans directory, and write its child-level row.
    for order in sorted(Orders):
        for k, v in structMapDict.items():
            if order == v['order']:
                delimChildValuesRow = []
                # Find the full path of the file identified in v['filename'].
                for fullPath in filesInObjectDirectory:
                    if (v['filename'] in fullPath):
                        shutil.copy(fullPath, scansDir)

                # Child-level row. @todo: for flat items with no child-level
                # metadata we use the structMap label (in the dc.title slot)
                # and the filename (last position); everything in between is
                # ''. This needs to be generalized for child-level metadata
                # and hierarchical items.
                titlePosition = collectionFieldInfo['order'].index('title')
                if titlePosition == 0:
                    delimChildValuesRow.append(v['label'])
                    for i in range(1, len(delimHeaderRow) - 1):
                        delimChildValuesRow.append('')
                # Rows must contain the filename in last position.
                delimChildValuesRow.append(v['filename'])
                writer.writerow(delimChildValuesRow)

    delimitedFile.close()

    zipProjectClientOutput(outputDipDir, dipUuid, 'compound')
    # Delete the unzipped version of the DIP since we don't use it anyway.
+ shutil.rmtree(outputDipDir) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='restructure') + parser.add_argument('--uuid', action="store", dest='uuid', metavar='UUID', help='SIP-UUID') + parser.add_argument('--dipDir', action="store", dest='dipDir', metavar='dipDir', help='DIP Directory') + parser.add_argument('--server', action="store", dest='contentdmServer', metavar='server', help='Target CONTENTdm server') + parser.add_argument('--collection', action="store", dest='targetCollection', + metavar='targetCollection', help='Target CONTENTdm Collection') + parser.add_argument('--ingestFormat', action="store", dest='ingestFormat', metavar='ingestFormat', + default='directupload', help='The format of the ingest package, either directupload or projectclient') + parser.add_argument('--outputDir', action="store", dest='outputDir', metavar='outputDir', + help='The destination for the restructured DIPs') + + args = parser.parse_args() + + # Define the directory where DIPs are waiting to be processed. + inputDipDir = args.dipDir + # Use %watchDirectoryPath%uploadedDIPs as the output directory for the directupload and projectclient output. + # We also create a 'CONTENTdm' subdirectory for DIPs created by this microservice. + outputDipDir = os.path.join(args.outputDir, 'CONTENTdm') + if not os.path.exists(outputDipDir): + os.makedirs(outputDipDir) + + # Perform some preliminary validation on the argument values. + if not os.path.exists(inputDipDir): + print "Sorry, can't find " + inputDipDir + sys.exit(1) + + if args.ingestFormat not in ['directupload', 'projectclient']: + print "Sorry, ingestFormat must be either 'directupload' or 'projectclient'" + sys.exit(1) + + # Read and parse the METS file. Assumes there is one METS file in the DIP directory. 
+ for infile in glob.glob(os.path.join(inputDipDir, "METS*.xml")): + metsFile = infile + metsDom = parse(metsFile) + + # Check to see if we're dealing with a simple or compound item, and fire the + # appropriate DIP-generation function. + filesInObjectDirectory = getObjectDirectoryFiles(os.path.join(inputDipDir, 'objects')) + if os.path.exists(os.path.join(inputDipDir, 'thumbnails')): + filesInThumbnailDirectory = glob.glob(os.path.join(inputDipDir, 'thumbnails', "*.jpg")) + + if len(filesInObjectDirectory) == 1 and args.ingestFormat == 'directupload': + generateSimpleContentDMDirectUploadPackage(metsDom, args.uuid, outputDipDir, filesInObjectDirectory, filesInThumbnailDirectory) + if len(filesInObjectDirectory) == 1 and args.ingestFormat == 'projectclient': + generateSimpleContentDMProjectClientPackage(metsDom, args.uuid, outputDipDir, filesInObjectDirectory) + + if len(filesInObjectDirectory) > 1 and args.ingestFormat == 'directupload': + generateCompoundContentDMDirectUploadPackage(metsDom, args.uuid, outputDipDir, filesInObjectDirectory, filesInThumbnailDirectory) + if len(filesInObjectDirectory) > 1 and args.ingestFormat == 'projectclient': + generateCompoundContentDMProjectClientPackage(metsDom, args.uuid, outputDipDir, filesInObjectDirectory) + diff --git a/src/MCPClient/lib/clientScripts/restructureForCompliance.py b/src/MCPClient/lib/clientScripts/restructureForCompliance.py new file mode 100755 index 0000000000..332d8066dd --- /dev/null +++ b/src/MCPClient/lib/clientScripts/restructureForCompliance.py @@ -0,0 +1,95 @@ +#!/usr/bin/python -OO +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. + +# @package Archivematica +# @subpackage archivematicaClientScript +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ + +import os +import sys +import shutil +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +from fileOperations import updateDirectoryLocation +from fileOperations import updateFileLocation2 + + +requiredDirectories = ["logs", "logs/fileMeta", "metadata", "metadata/submissionDocumentation", "objects"] +optionalFiles = "processingMCP.xml" + +def restructureBagForComplianceFileUUIDsAssigned(unitPath, unitIdentifier, unitIdentifierType, unitPathReplaceWith = "%transferDirectory%"): + bagFileDefaultDest = os.path.join(unitPath, "logs", "BagIt") + requiredDirectories.append(bagFileDefaultDest) + unitDataPath = os.path.join(unitPath, "data") + for dir in requiredDirectories: + dirPath = os.path.join(unitPath, dir) + dirDataPath = os.path.join(unitPath, "data", dir) + if os.path.isdir(dirDataPath): + #move to the top level + src = dirDataPath + dst = dirPath + updateDirectoryLocation(src, dst, unitPath, unitIdentifier, unitIdentifierType, unitPathReplaceWith) + print "moving directory ", dir + else: + print "creating: ", dir + os.mkdir(dirPath) + for item in os.listdir(unitPath): + src = os.path.join(unitPath, item) + if os.path.isfile(src): + if item.startswith("manifest"): + dst = os.path.join(unitPath, "metadata", item) + else: + dst = os.path.join(bagFileDefaultDest, item) + updateFileLocation2(src, dst, unitPath, unitIdentifier, unitIdentifierType, unitPathReplaceWith) + for item in os.listdir(unitDataPath): + itemPath = 
os.path.join(unitDataPath, item)
+        if os.path.isdir(itemPath) and item not in requiredDirectories:
+            print "moving directory to objects: ", item
+            dst = os.path.join(unitPath, "objects", item)
+            updateDirectoryLocation(itemPath, dst, unitPath, unitIdentifier, unitIdentifierType, unitPathReplaceWith)
+        elif os.path.isfile(itemPath) and item not in optionalFiles:
+            print "moving file to objects: ", item
+            dst = os.path.join(unitPath, "objects", item)
+            updateFileLocation2(itemPath, dst, unitPath, unitIdentifier, unitIdentifierType, unitPathReplaceWith)
+    print "removing empty data directory"
+    os.rmdir(unitDataPath)
+
+def restructureForComplianceFileUUIDsAssigned(unitPath, unitIdentifier, unitIdentifierType):
+    print "Not implemented"
+    print unitPath, unitIdentifier, unitIdentifierType
+
+def restructureDirectory(unitPath):
+    for dir in requiredDirectories:
+        dirPath = os.path.join(unitPath, dir)
+        if not os.path.isdir(dirPath):
+            os.mkdir(dirPath)
+            print "creating: ", dir
+    for item in os.listdir(unitPath):
+        dst = os.path.join(unitPath, "objects") + "/."
+        itemPath = os.path.join(unitPath, item)
+        if os.path.isdir(itemPath) and item not in requiredDirectories:
+            shutil.move(itemPath, dst)
+            print "moving directory to objects: ", item
+        elif os.path.isfile(itemPath) and item not in optionalFiles:
+            shutil.move(itemPath, dst)
+            print "moving file to objects: ", item
+
+if __name__ == '__main__':
+    target = sys.argv[1]
+    restructureDirectory(target)
+
diff --git a/src/MCPClient/lib/clientScripts/restructureForComplianceMaildir.py b/src/MCPClient/lib/clientScripts/restructureForComplianceMaildir.py
new file mode 100755
index 0000000000..e1c5183a3a
--- /dev/null
+++ b/src/MCPClient/lib/clientScripts/restructureForComplianceMaildir.py
@@ -0,0 +1,52 @@
+#!/usr/bin/python -OO
+# This file is part of Archivematica.
+#
+# Copyright 2010-2012 Artefactual Systems Inc.
<http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. + +# @package Archivematica +# @subpackage archivematicaClientScript +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ + +import os +import sys +import shutil + + + +requiredDirectories = ["logs", "logs/fileMeta", "metadata", "metadata/submissionDocumentation", "objects", "objects/Maildir"] +optionalFiles = "processingMCP.xml" + +def restructureMaildirDirectory(unitPath): + for dir in requiredDirectories: + dirPath = os.path.join(unitPath, dir) + if not os.path.isdir(dirPath): + os.mkdir(dirPath) + print "creating: ", dir + for item in os.listdir(unitPath): + dst = os.path.join(unitPath, "objects", "Maildir") + "/." 
+ itemPath = os.path.join(unitPath, item) + if os.path.isdir(itemPath) and item not in requiredDirectories: + shutil.move(itemPath, dst) + print "moving directory to objects/Maildir: ", item + elif os.path.isfile(itemPath) and item not in optionalFiles: + shutil.move(itemPath, dst) + print "moving file to objects/Maildir: ", item + +if __name__ == '__main__': + target = sys.argv[1] + restructureMaildirDirectory(target) + diff --git a/src/MCPClient/lib/clientScripts/sanitizeObjectNames.py b/src/MCPClient/lib/clientScripts/sanitizeObjectNames.py new file mode 100755 index 0000000000..a34b0d351a --- /dev/null +++ b/src/MCPClient/lib/clientScripts/sanitizeObjectNames.py @@ -0,0 +1,132 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. 
+ +# @package Archivematica +# @subpackage archivematicaClientScript +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ +import sys +import shlex +import subprocess +import os +import MySQLdb +import uuid +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +import databaseInterface +from databaseFunctions import insertIntoEvents +from fileOperations import updateFileLocation +from archivematicaFunctions import unicodeToStr + +if __name__ == '__main__': + objectsDirectory = sys.argv[1] #the directory to run sanitization on. + sipUUID = sys.argv[2] + date = sys.argv[3] + taskUUID = sys.argv[4] + groupType = sys.argv[5] + groupType = "%%%s%%" % (groupType) + groupSQL = sys.argv[6] + sipPath = sys.argv[7] #the unit path + groupID = sipUUID + + #relativeReplacement = "%sobjects/" % (groupType) #"%SIPDirectory%objects/" + relativeReplacement = objectsDirectory.replace(sipPath, groupType, 1) #"%SIPDirectory%objects/" + + + #def executeCommand(taskUUID, requiresOutputLock = "no", sInput = "", sOutput = "", sError = "", execute = "", arguments = "", serverConnection = None): + command = "sanitizeNames \"" + objectsDirectory + "\"" + lines = [] + commandVersion = "sanitizeNames -V" + version = "" + try: + p = subprocess.Popen(shlex.split(command), stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + + #p.wait() + output = p.communicate() + retcode = p.returncode + + #print output + #print output[1] + #print >>sys.stderr, output[1] + + #it executes check for errors + if retcode != 0: + print >>sys.stderr, "error code:" + retcode.__str__() + print >>sys.stderr, output[1]# sError + quit(retcode) + lines = output[0].split("\n") + + #GET VERSION + p = subprocess.Popen(shlex.split(commandVersion), stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + + #p.wait() + output = p.communicate() + retcode = p.returncode + + #it executes check for errors + if retcode != 0: + print >>sys.stderr, "Error getting version; 
error code:" + retcode.__str__() + print output[1]# sError + quit(retcode) + version = output[0].replace("\n", "") + except OSError, ose: + print >>sys.stderr, "Execution failed:", ose + quit(2) + + eventDetail= "program=\"sanitizeNames\"; version=\"" + version + "\"" + for line in lines: + detoxfiles = line.split(" -> ") + if len(detoxfiles) > 1 : + oldfile = detoxfiles[0].split('\n',1)[0] + newfile = detoxfiles[1] + #print "line: ", line + if os.path.isfile(newfile): + oldfile = oldfile.replace(objectsDirectory, relativeReplacement, 1) + newfile = newfile.replace(objectsDirectory, relativeReplacement, 1) + print oldfile, " -> ", newfile + + if groupType == "%SIPDirectory%": + updateFileLocation(oldfile, newfile, "name cleanup", date, "prohibited characters removed:" + eventDetail, fileUUID=None, sipUUID=sipUUID) + elif groupType == "%transferDirectory%": + updateFileLocation(oldfile, newfile, "name cleanup", date, "prohibited characters removed:" + eventDetail, fileUUID=None, transferUUID=sipUUID) + else: + print >>sys.stderr, "bad group type", groupType + exit(3) + + elif os.path.isdir(newfile): + oldfile = oldfile.replace(objectsDirectory, relativeReplacement, 1) + "/" + newfile = newfile.replace(objectsDirectory, relativeReplacement, 1) + "/" + directoryContents = [] + + sql = "SELECT fileUUID, currentLocation FROM Files WHERE Files.removedTime = 0 AND Files.currentLocation LIKE '" + MySQLdb.escape_string(oldfile.replace("\\", "\\\\")).replace("%","\%") + "%' AND " + groupSQL + " = '" + groupID + "';" + + c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + while row != None: + fileUUID = row[0] + oldPath = row[1] + newPath = unicodeToStr(oldPath).replace(oldfile, newfile, 1) + directoryContents.append((fileUUID, oldPath, newPath)) + row = c.fetchone() + sqlLock.release() + + print oldfile, " -> ", newfile + + for fileUUID, oldPath, newPath in directoryContents: + updateFileLocation(oldPath, newPath, "name cleanup", date, "prohibited characters 
removed:" + eventDetail, fileUUID=fileUUID) + diff --git a/src/MCPClient/lib/clientScripts/sanitizeSIPName.py b/src/MCPClient/lib/clientScripts/sanitizeSIPName.py new file mode 100755 index 0000000000..0d1e20ca5c --- /dev/null +++ b/src/MCPClient/lib/clientScripts/sanitizeSIPName.py @@ -0,0 +1,48 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. 
+ +# @package Archivematica +# @subpackage archivematicaClientScript +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ + +#import os +from archivematicaMoveSIP import moveSIP +import sys +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +import databaseInterface +sys.path.append("/usr/lib/sanitizeNames") +from sanitizeNames import sanitizePath + + +DetoxDic={} + +if __name__ == '__main__': + SIPDirectory = sys.argv[1] + sipUUID = sys.argv[2] + date = sys.argv[3] + sharedDirectoryPath = sys.argv[4] + #os.path.abspath(SIPDirectory) + + dst = sanitizePath(SIPDirectory) + if SIPDirectory != dst: + dst = dst.replace(sharedDirectoryPath, "%sharedPath%", 1) + print SIPDirectory.replace(sharedDirectoryPath, "%sharedPath%", 1) + " -> " + dst + sql = """UPDATE SIPs SET currentPath='""" + dst + """' WHERE sipUUID='""" + sipUUID + """';""" + databaseInterface.runSQL(sql) diff --git a/src/MCPClient/lib/clientScripts/setDirectoryPermissionsForAppraisal.sh b/src/MCPClient/lib/clientScripts/setDirectoryPermissionsForAppraisal.sh new file mode 100755 index 0000000000..0d344f608f --- /dev/null +++ b/src/MCPClient/lib/clientScripts/setDirectoryPermissionsForAppraisal.sh @@ -0,0 +1,43 @@ +#!/bin/bash + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. 
If not, see <http://www.gnu.org/licenses/>. + +# @package Archivematica +# @subpackage archivematicaClientScript +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ + +#source /etc/archivematica/archivematicaConfig.conf + +target="$1" +if [ -e "${target}" ]; then + sudo chown -R archivematica:archivematica "${target}" + echo `basename "${target}"` owned by "archivematica:archivematica" now + chmod -R 750 "${target}" + chmod 770 "${target}" + if [ -d "${target}objects" ]; then + chmod -R 770 "${target}objects" + fi + if [ -d "${target}metadata" ]; then + chmod -R 770 "${target}metadata" + fi +else + echo $target does not exist\ 1>&2 + exit 1 +fi + diff --git a/src/MCPClient/lib/clientScripts/sha256deepRelative.sh b/src/MCPClient/lib/clientScripts/sha256deepRelative.sh new file mode 100755 index 0000000000..f4217b2583 --- /dev/null +++ b/src/MCPClient/lib/clientScripts/sha256deepRelative.sh @@ -0,0 +1,28 @@ +#!/bin/bash + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. + +# @package Archivematica +# @subpackage archivematicaClientScript +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ + +set -e +cd "$1" +sha256deep "$2" "$3" "$4" "$5" "$6" +exit $? 
diff --git a/src/MCPClient/lib/clientScripts/storeAIP.py b/src/MCPClient/lib/clientScripts/storeAIP.py new file mode 100755 index 0000000000..225f1954cb --- /dev/null +++ b/src/MCPClient/lib/clientScripts/storeAIP.py @@ -0,0 +1,104 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. 
+
+# @package Archivematica
+# @subpackage archivematicaClientScript
+# @author Joseph Perry <joseph@artefactual.com>
+# @version svn: $Id$
+import sys
+import os
+import stat
+import shutil
+import MySQLdb
+sys.path.append("/usr/lib/archivematica/archivematicaCommon")
+from executeOrRunSubProcess import executeOrRun
+import databaseInterface
+
+
+printSubProcessOutput=True
+
+AIPsStore = sys.argv[1]
+AIP = sys.argv[2]
+SIPUUID = sys.argv[3]
+HTMLFilePath = sys.argv[4]
+SIPNAME = sys.argv[5]
+SIPDATE = sys.argv[6]
+
+#Get the UUID quads
+uuidQuads = []
+SIPUUIDStripped = SIPUUID.replace("-","")
+uuidQuads.append(SIPUUIDStripped[:4])
+uuidQuads.append(SIPUUIDStripped[4:8])
+uuidQuads.append(SIPUUIDStripped[8:12])
+uuidQuads.append(SIPUUIDStripped[12:16])
+uuidQuads.append(SIPUUIDStripped[16:20])
+uuidQuads.append(SIPUUIDStripped[20:24])
+uuidQuads.append(SIPUUIDStripped[24:28])
+uuidQuads.append(SIPUUIDStripped[28:32])
+
+AIPsStoreWithQuads = AIPsStore
+mode= stat.S_IWUSR + stat.S_IRUSR + stat.S_IXUSR + stat.S_IRGRP + stat.S_IXGRP + stat.S_IXOTH + stat.S_IROTH
+for quad in uuidQuads:
+    AIPsStoreWithQuads = AIPsStoreWithQuads + quad + "/"
+    if not os.path.isdir(AIPsStoreWithQuads):
+        os.mkdir(AIPsStoreWithQuads, mode)
+        #mode isn't working on the mkdir
+        os.chmod(AIPsStoreWithQuads, mode)
+
+storeLocation=os.path.join(AIPsStoreWithQuads, os.path.basename(os.path.abspath(AIP)))
+
+#Store the AIP
+shutil.move(AIP, storeLocation)
+
+#Extract the AIP
+extractDirectory = "/tmp/" + SIPUUID + "/"
+os.makedirs(extractDirectory)
+#
+command = "7z x -bd -o\"" + extractDirectory + "\" \"" + storeLocation + "\""
+ret = executeOrRun("command", command, printing=printSubProcessOutput)
+exitCode, stdOut, stdErr = ret
+if exitCode != 0:
+    print >>sys.stderr, "Error extracting"
+    quit(1)
+
+bag = extractDirectory + SIPNAME + "-" + SIPUUID + "/"
+verificationCommands = []
+verificationCommands.append("/usr/share/bagit/bin/bag verifyvalid \"" + bag + "\"")
+verificationCommands.append("/usr/share/bagit/bin/bag checkpayloadoxum \"" + bag + "\"") +verificationCommands.append("/usr/share/bagit/bin/bag verifycomplete \"" + bag + "\"") +verificationCommands.append("/usr/share/bagit/bin/bag verifypayloadmanifests \"" + bag + "\"") +verificationCommands.append("/usr/share/bagit/bin/bag verifytagmanifests \"" + bag + "\"") +exitCode = 0 +for command in verificationCommands: + ret = executeOrRun("command", command, printing=printSubProcessOutput) + exit, stdOut, stdErr = ret + if exit != 0: + print >>sys.stderr, "Failed test: ", command + exitCode=1 + else: + print >>sys.stderr, "Passed test: ", command + +#cleanup +shutil.rmtree(extractDirectory) + +#write to database +sql = """INSERT INTO AIPs (sipUUID, sipName, sipDate, filePath) VALUES ('%s', '%s', '%s', '%s')""" % (MySQLdb.escape_string(SIPUUID), MySQLdb.escape_string(SIPNAME), MySQLdb.escape_string(SIPDATE), MySQLdb.escape_string(storeLocation)) +databaseInterface.runSQL(sql) + +quit(exitCode) diff --git a/src/MCPClient/lib/clientScripts/upload-contentDM.py b/src/MCPClient/lib/clientScripts/upload-contentDM.py new file mode 100755 index 0000000000..100f8f7f58 --- /dev/null +++ b/src/MCPClient/lib/clientScripts/upload-contentDM.py @@ -0,0 +1,118 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+#
+# You should have received a copy of the GNU General Public License
+# along with Archivematica. If not, see <http://www.gnu.org/licenses/>.
+
+# @package Archivematica
+# @subpackage archivematicaClientScript
+# @author Mark Jordan <EMAIL@EMAIL.email>
+# @version svn: $Id$
+
+import os
+import sys
+import stat
+import glob
+import argparse
+import json
+import urllib
+
+def getDestinationImportDirectory(targetCollection, contentdmServer):
+    try:
+        CollectionParametersUrl = 'http://' + contentdmServer + '/dmwebservices/index.php?q=dmGetCollectionParameters' + targetCollection + '/json'
+        f = urllib.urlopen(CollectionParametersUrl)
+        collectionParametersString = f.read()
+        collectionParameters = json.loads(collectionParametersString)
+    except:
+        print >>sys.stderr, "Cannot retrieve CONTENTdm collection parameters from " + CollectionParametersUrl
+        quit(1)
+
+    return collectionParameters['path']
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(description='restructure')
+    parser.add_argument('--uuid', action="store", dest='uuid', metavar='UUID', help='AIP-UUID')
+    parser.add_argument('--server', action="store", dest='contentdmServer', metavar='server',
+        help='Target CONTENTdm server')
+    parser.add_argument('--username', action="store", dest='contentdmUser', metavar='server',
+        help='Username for rsyncing the DIP to the CONTENTdm server')
+    parser.add_argument('--group', action="store", dest='contentdmGroup', metavar='server',
+        help='Group (numeric) ID for rsyncing the DIP to the CONTENTdm server')
+    parser.add_argument('--collection', action="store", dest='targetCollection',
+        metavar='targetCollection', help='Target CONTENTdm Collection')
+    parser.add_argument('--outputDir', action="store", dest='outputDir', metavar='outputDir',
+        help='The location of the restructured DIPs')
+
+    args = parser.parse_args()
+
+    contentdmCollectionDirectory = getDestinationImportDirectory(args.targetCollection, args.contentdmServer)
+
+    # Determine if the package is for a
simple item or a compound item by counting the
+    # number of .desc files in the DIP directory. If it's simple, append 'import' to the
+    # end of destinationImportDirectory; if it's compound, append 'import/cdoc' to the end.
+    sourceDescFiles = glob.glob(os.path.join(args.outputDir, 'CONTENTdm', 'directupload', args.uuid, "*.desc"))
+    if len(sourceDescFiles) > 1:
+        packageType = 'compound'
+    else:
+        packageType = 'simple'
+
+    if packageType == 'compound':
+        destinationImportDirectory = os.path.join(contentdmCollectionDirectory, 'import', 'cdoc', args.uuid)
+    else:
+        destinationImportDirectory = os.path.join(contentdmCollectionDirectory, 'import')
+
+    # We need to remove the port, if any, from server.
+    server, sep, port = args.contentdmServer.partition(':')
+    destPath = args.contentdmUser + '@' + server + ':' + destinationImportDirectory
+
+    # If we're uploading a compound item package, we need to create a directory for it in cdoc
+    # and make it group writable.
+    if packageType == 'compound':
+        sshLogin = args.contentdmUser + "@" + server
+        sshMkdirCmd = 'mkdir'
+        sshChmodCmd = 'chmod g+rw'
+        sshChgrpCmd = 'chgrp'
+        sshCmd = 'ssh %s "%s %s && %s %s && %s %s %s"' % (sshLogin, sshMkdirCmd, destinationImportDirectory, sshChmodCmd, destinationImportDirectory, sshChgrpCmd, args.contentdmGroup, destinationImportDirectory)
+        sshExitCode = os.system(sshCmd)
+        if sshExitCode != 0:
+            print "Error setting attributes of file " + destPath
+            quit(1)
+        print "sshCmd : " + sshCmd
+
+    sourceDir = os.path.join(args.outputDir, 'CONTENTdm', 'directupload', args.uuid)
+    # For each file in the source DIP directory, rsync it up to the CONTENTdm server.
+ for sourceFile in glob.glob(os.path.join(sourceDir, "*.*")): + sourcePath, sourceFilename = os.path.split(sourceFile) + rsyncDestPath = args.contentdmUser + "@" + server + ":" + os.path.join(destinationImportDirectory, sourceFilename) + rsyncCmd = "rsync %s %s" % (sourceFile, rsyncDestPath) + rsyncExitCode = os.system(rsyncCmd) + if rsyncExitCode != 0: + print "Error copying direct upload package to " + destPath + quit(1) + print "rsyncCmd: " + rsyncCmd + + # Change the permissions and group of the DIP files so they are correct on the CONTENTdm + sshLogin = args.contentdmUser + "@" + server + remoteDestPath = os.path.join(destinationImportDirectory, sourceFilename) + sshChgrpCmd = 'chgrp ' + args.contentdmGroup + sshChmodCmd = 'chmod g+rw' + sshCmd = 'ssh %s "%s %s && %s %s"' % (sshLogin, sshChgrpCmd, remoteDestPath, sshChmodCmd, remoteDestPath) + sshExitCode = os.system(sshCmd) + if sshExitCode != 0: + print "Error setting attributes of file " + destPath + quit(1) + print "sshCmd : " + sshCmd + diff --git a/src/MCPClient/lib/clientScripts/verifyAndRestructureTransferBag.py b/src/MCPClient/lib/clientScripts/verifyAndRestructureTransferBag.py new file mode 100755 index 0000000000..4928c3bfd4 --- /dev/null +++ b/src/MCPClient/lib/clientScripts/verifyAndRestructureTransferBag.py @@ -0,0 +1,77 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. + +# @package Archivematica +# @subpackage archivematicaClientScript +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ +import sys +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +from executeOrRunSubProcess import executeOrRun +from restructureForCompliance import restructureBagForComplianceFileUUIDsAssigned +from databaseFunctions import insertIntoEvents +import databaseInterface + +printSubProcessOutput=False +exitCode = 0 +verificationCommands = [] +verificationCommandsOutputs = [] + +def verifyBag(bag): + global exitCode + verificationCommands = [ + "/usr/share/bagit/bin/bag verifyvalid \"" + bag + "\"", + "/usr/share/bagit/bin/bag checkpayloadoxum \"" + bag + "\"", + "/usr/share/bagit/bin/bag verifycomplete \"" + bag + "\"", + "/usr/share/bagit/bin/bag verifypayloadmanifests \"" + bag + "\"", + "/usr/share/bagit/bin/bag verifytagmanifests \"" + bag + "\"" ] + for command in verificationCommands: + ret = executeOrRun("command", command, printing=printSubProcessOutput) + verificationCommandsOutputs.append(ret) + exit, stdOut, stdErr = ret + if exit != 0: + print >>sys.stderr, "Failed test: ", command + print >>sys.stderr, stdErr + print >>sys.stderr + exitCode += 1 + else: + print "Passed test: ", command + +if __name__ == '__main__': + target = sys.argv[1] + transferUUID = sys.argv[2] + verifyBag(target) + if exitCode != 0: + print >>sys.stderr, "Failed bagit compliance. Not restructuring." 
+ exit(exitCode) + restructureBagForComplianceFileUUIDsAssigned(target, transferUUID, "transferUUID") + for i in range(len(verificationCommands)): + print verificationCommands[i] + print verificationCommandsOutputs[i] + print + + sql = "SELECT Files.fileUUID FROM Files WHERE removedTime = 0 AND Files.currentLocation LIKE '\%transferDirectory\%objects/%' AND transferUUID = '" + transferUUID + "';" + rows = databaseInterface.queryAllSQL(sql) + for row in rows: + insertIntoEvents(fileUUID=row[0], \ + eventType="fixity check", \ + eventDetail="Bagit - verifypayloadmanifests", \ + eventOutcome="Pass") + + exit(exitCode) diff --git a/src/MCPClient/lib/clientScripts/verifyBAG.py b/src/MCPClient/lib/clientScripts/verifyBAG.py new file mode 100755 index 0000000000..d92824f587 --- /dev/null +++ b/src/MCPClient/lib/clientScripts/verifyBAG.py @@ -0,0 +1,46 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. 
+ +# @package Archivematica +# @subpackage archivematicaClientScript +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ +import sys +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +from executeOrRunSubProcess import executeOrRun + +printSubProcessOutput=True + +bag = sys.argv[1] +verificationCommands = [] +verificationCommands.append("/usr/share/bagit/bin/bag verifyvalid " + bag) +verificationCommands.append("/usr/share/bagit/bin/bag checkpayloadoxum " + bag) +verificationCommands.append("/usr/share/bagit/bin/bag verifycomplete " + bag) +verificationCommands.append("/usr/share/bagit/bin/bag verifypayloadmanifests " + bag) +verificationCommands.append("/usr/share/bagit/bin/bag verifytagmanifests " + bag) +exitCode = 0 +for command in verificationCommands: + ret = executeOrRun("command", command, printing=printSubProcessOutput) + exit, stdOut, stdErr = ret + if exit != 0: + print >>sys.stderr, "Failed test: ", command + exitCode=1 + else: + print >>sys.stderr, "Passed test: ", command +quit(exitCode) diff --git a/src/MCPClient/lib/clientScripts/verifyChecksumsInFileSecOfDspaceMETSFiles.py b/src/MCPClient/lib/clientScripts/verifyChecksumsInFileSecOfDspaceMETSFiles.py new file mode 100755 index 0000000000..2345bee808 --- /dev/null +++ b/src/MCPClient/lib/clientScripts/verifyChecksumsInFileSecOfDspaceMETSFiles.py @@ -0,0 +1,99 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. + +# @package Archivematica +# @subpackage archivematicaClientScript +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ +import os +import sys +import lxml.etree as etree +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +#import databaseInterface +from externals.checksummingTools import sha_for_file +from externals.checksummingTools import md5_for_file + +def verifyMetsFileSecChecksums(metsFile, date, taskUUID, relativeDirectory="./"): + print metsFile + exitCode = 0 + tree = etree.parse(metsFile) + root = tree.getroot() + for item in root.findall("{http://www.loc.gov/METS/}fileSec/{http://www.loc.gov/METS/}fileGrp/{http://www.loc.gov/METS/}file"): + #print etree.tostring(item) + #print item + + checksum = item.get("CHECKSUM") + checksumType = item.get("CHECKSUMTYPE") + for item2 in item: + if item2.tag == "{http://www.loc.gov/METS/}FLocat": + #print "floc: ", item2.tag, etree.tostring(item2) + #print item2.attrib + fileLocation = item2.get("{http://www.w3.org/1999/xlink}href") + #print "%s - %s - %s " % (checksumType, checksum, fileLocation) + fileFullPath = os.path.join(relativeDirectory, fileLocation) + if checksumType == "MD5": + checksum2 = md5_for_file(fileFullPath) + eventDetail = "program=\"python\"; module=\"hashlib.sha256()\"" + elif checksumType == "sha256": + checksum2 = sha_for_file(fileFullPath) + eventDetail = "program=\"python\"; module=\"hashlib.md5()\"" + else: + print >>sys.stderr, "Unsupported checksum type: %s" % (checksumType.__str__()) + exit(300) + + + if checksum != checksum2: + #eventOutcomeDetailNote = checksumFile.__str__() + " != " + checksumDB.__str__() + eventOutcome="Fail" + print "%s - %s - %s" % ((checksum == checksum2).__str__(), checksum.__str__(), checksum2.__str__()) + print >>sys.stderr, 
eventOutcome, fileFullPath + exitCode = exitCode + 22 + else: + #eventOutcomeDetailNote = checksumFile.__str__() + "verified" + eventOutcome="Pass" + print eventOutcome, fileLocation + + + + + + + return exitCode + + #insertIntoEvents(fileUUID="", eventIdentifierUUID="", eventType="", eventDateTime=databaseInterface.getUTCDate(), eventDetail="", eventOutcome="", eventOutcomeDetailNote="") + databaseFunctions.insertIntoEvents(fileUUID=fileUUID, \ + eventIdentifierUUID=eventIdentifierUUID, \ + eventType="fixity check", \ + eventDateTime=date, \ + eventOutcome=eventOutcome, \ + eventOutcomeDetailNote=eventOutcomeDetailNote, \ + eventDetail=eventDetail) + + + + + +if __name__ == '__main__': + metsFile = sys.argv[1] + date = sys.argv[2] + taskUUID = sys.argv[3] + + + ret = verifyMetsFileSecChecksums(metsFile, date, taskUUID, relativeDirectory=os.path.dirname(metsFile) + "/") + quit(ret) diff --git a/src/MCPClient/lib/clientScripts/verifyMD5.sh b/src/MCPClient/lib/clientScripts/verifyMD5.sh new file mode 100755 index 0000000000..6b70892361 --- /dev/null +++ b/src/MCPClient/lib/clientScripts/verifyMD5.sh @@ -0,0 +1,72 @@ +#!/bin/bash + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. 
+ +# @package Archivematica +# @subpackage archivematicaClientScript +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ + +checkMD5NoGui="`dirname $0`/archivematicaCheckMD5NoGUI.sh" + +target="$1" +checksums="$2" +date="$3" +eventID="$4" +transferUUID="$5" + + +# md5deep - Compute and compare MD5 message digests +# sha1deep - Compute and compare SHA-1 message digests +# sha256deep - Compute and compare SHA-256 message digests +# tigerdeep - Compute and compare Tiger message digests +# whirlpooldeep - Compute and compare Whirlpool message digests + +ret=0 + +MD5FILE="${target}metadata/${checksums}.md5" +if [ -f "${MD5FILE}" ]; then + "${checkMD5NoGui}" "${target}objects/" "${MD5FILE}" "${target}logs/`basename "${MD5FILE}"`-Check-`date`" "md5deep" && \ + "`dirname $0`/createEventsForGroup.py" --groupUUID "${transferUUID}" --groupType "transferUUID" --eventType "fixity" --eventDateTime "$date" --eventOutcome "Pass" --eventDetail "`md5deep -v` md5deep ${target}" + ret+="$?" +else + echo "File Does not exist:" "${MD5FILE}" +fi + +SHA1FILE="${target}metadata/${checksums}.sha1" +if [ -f "${SHA1FILE}" ]; then + "${checkMD5NoGui}" "${target}objects/" "${SHA1FILE}" "${target}logs/`basename "${SHA1FILE}"`-Check-`date`" "sha1deep" && \ + "`dirname $0`/createEventsForGroup.py" --groupUUID "${transferUUID}" --groupType "transferUUID" --eventType "fixity" --eventDateTime "$date" --eventOutcome "Pass" --eventDetail "`sha1deep -v` sha1deep ${target}" + ret+="$?" +else + echo "File Does not exist:" "${SHA1FILE}" +fi + +SHA256FILE="${target}metadata/${checksums}.sha256" +if [ -f "${SHA256FILE}" ]; then + "${checkMD5NoGui}" "${target}objects/" "${SHA256FILE}" "${target}logs/`basename "${SHA256FILE}"`-Check-`date`" "sha256deep" && \ + "`dirname $0`/createEventsForGroup.py" --groupUUID "${transferUUID}" --groupType "transferUUID" --eventType "fixity" --eventDateTime "$date" --eventOutcome "Pass" --eventDetail "`sha256deep -v` sha256deep ${target}" + ret+="$?" 
+else + echo "File Does not exist:" "${SHA256FILE}" +fi + + +exit ${ret} + + diff --git a/src/MCPClient/lib/clientScripts/verifyPREMISChecksums.py b/src/MCPClient/lib/clientScripts/verifyPREMISChecksums.py new file mode 100755 index 0000000000..367eab7d85 --- /dev/null +++ b/src/MCPClient/lib/clientScripts/verifyPREMISChecksums.py @@ -0,0 +1,83 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. 
+ +# @package Archivematica +# @subpackage archivematicaClientScript +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ +import sys +import os +from optparse import OptionParser +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +import databaseInterface +import databaseFunctions +from externals.checksummingTools import sha_for_file + +def verifyChecksum(fileUUID, filePath, date, eventIdentifierUUID): + sql = """SELECT checksum FROM Files WHERE fileUUID = '""" + fileUUID + "'" + c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + checksumDB = "" + while row != None: + checksumDB = row[0] + row = c.fetchone() + sqlLock.release() + if checksumDB == None or checksumDB == "" or checksumDB == "None": + print >>sys.stderr, "No checksum found in database for file:", fileUUID, filePath + exit(1) + checksumFile = sha_for_file(filePath) + + eventOutcome="" + eventOutcomeDetailNote="" + exitCode = 0 + if checksumFile != checksumDB: + eventOutcomeDetailNote = checksumFile.__str__() + " != " + checksumDB.__str__() + eventOutcome="Fail" + exitCode = 2 + print >>sys.stderr, "Checksums do not match:", fileUUID, filePath + print >>sys.stderr, eventOutcomeDetailNote + else: + eventOutcomeDetailNote = checksumFile.__str__() + "verified" + eventOutcome="Pass" + exitCode = 0 + + #insertIntoEvents(fileUUID="", eventIdentifierUUID="", eventType="", eventDateTime=databaseInterface.getUTCDate(), eventDetail="", eventOutcome="", eventOutcomeDetailNote="") + databaseFunctions.insertIntoEvents(fileUUID=fileUUID, \ + eventIdentifierUUID=eventIdentifierUUID, \ + eventType="fixity check", \ + eventDateTime=date, \ + eventOutcome=eventOutcome, \ + eventOutcomeDetailNote=eventOutcomeDetailNote, \ + eventDetail="program=\"python\"; module=\"hashlib.sha256()\"") + + exit(exitCode) + + +if __name__ == '__main__': + parser = OptionParser() + parser.add_option("-i", "--fileUUID", action="store", dest="fileUUID", default="") + parser.add_option("-p", 
"--filePath", action="store", dest="filePath", default="") + parser.add_option("-d", "--date", action="store", dest="date", default="") + parser.add_option("-u", "--eventIdentifierUUID", action="store", dest="eventIdentifierUUID", default="") + (opts, args) = parser.parse_args() + + verifyChecksum(opts.fileUUID, \ + opts.filePath, \ + opts.date, \ + opts.eventIdentifierUUID) diff --git a/src/MCPClient/lib/clientScripts/verifySIPCompliance.py b/src/MCPClient/lib/clientScripts/verifySIPCompliance.py new file mode 100755 index 0000000000..fd1f5f7b45 --- /dev/null +++ b/src/MCPClient/lib/clientScripts/verifySIPCompliance.py @@ -0,0 +1,74 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. 
+ +# @package Archivematica +# @subpackage archivematicaClientScript +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ +import os +import sys + +requiredDirectories = ["objects", \ + "logs", \ + "metadata",\ + "metadata/submissionDocumentation"] +allowableFiles = ["processingMCP.xml"] + +def checkDirectory(directory, ret=0): + try: + for directory, subDirectories, files in os.walk(directory): + for file in files: + filePath = os.path.join(directory, file) + except Exception as inst: + print >>sys.stderr, "Error navigating directory:", directory.__str__() + print >>sys.stderr, type(inst) + print >>sys.stderr, inst.args + ret += 1 + return ret + +def verifyDirectoriesExist(SIPDir, ret=0): + for directory in requiredDirectories: + if not os.path.isdir(os.path.join(SIPDir, directory)): + print >>sys.stderr, "Required Directory Does Not Exist: " + directory + ret += 1 + return ret + +def verifyNothingElseAtTopLevel(SIPDir, ret=0): + for entry in os.listdir(SIPDir): + if os.path.isdir(os.path.join(SIPDir, entry)): + if entry not in requiredDirectories: + print >>sys.stderr, "Error, directory exists: " + entry + ret += 1 + else: + if entry not in allowableFiles: + print >>sys.stderr, "Error, file exists: " + entry + ret += 1 + return ret + + + +if __name__ == '__main__': + SIPDir = sys.argv[1] + ret = verifyDirectoriesExist(SIPDir) + ret = verifyNothingElseAtTopLevel(SIPDir, ret) + ret = checkDirectory(SIPDir, ret) + if ret != 0: + import time + time.sleep(10) + quit(ret) diff --git a/src/MCPClient/lib/clientScripts/verifyTransferCompliance.py b/src/MCPClient/lib/clientScripts/verifyTransferCompliance.py new file mode 100755 index 0000000000..1b034f8a5e --- /dev/null +++ b/src/MCPClient/lib/clientScripts/verifyTransferCompliance.py @@ -0,0 +1,61 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. 
<http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. + +# @package Archivematica +# @subpackage archivematicaClientScript +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ +import os +import sys +from verifySIPCompliance import checkDirectory + +requiredDirectories = ["objects", \ + "logs", \ + "metadata",\ + "metadata/submissionDocumentation"] +allowableFiles = ["processingMCP.xml"] + +def verifyDirectoriesExist(SIPDir, ret=0): + for directory in requiredDirectories: + if not os.path.isdir(os.path.join(SIPDir, directory)): + print >>sys.stderr, "Required Directory Does Not Exist: " + directory + ret += 1 + return ret + +def verifyNothingElseAtTopLevel(SIPDir, ret=0): + for entry in os.listdir(SIPDir): + if os.path.isdir(os.path.join(SIPDir, entry)): + if entry not in requiredDirectories: + print >>sys.stderr, "Error, directory exists: " + entry + ret += 1 + else: + if entry not in allowableFiles: + print >>sys.stderr, "Error, file exists: " + entry + ret += 1 + return ret + +if __name__ == '__main__': + SIPDir = sys.argv[1] + ret = verifyDirectoriesExist(SIPDir) + ret = verifyNothingElseAtTopLevel(SIPDir, ret) + ret = checkDirectory(SIPDir, ret) + if ret != 0: + import time + time.sleep(10) + quit(ret) diff --git a/src/MCPClient/lib/transcoder.py b/src/MCPClient/lib/transcoder.py new file mode 100755 index 
0000000000..aaccdf7e4f --- /dev/null +++ b/src/MCPClient/lib/transcoder.py @@ -0,0 +1,179 @@ +#!/usr/bin/python -OO +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. + +# @package Archivematica +# @subpackage archivematicaClient +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ +import re +import math +import sys +import os +import time +from pipes import quote +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +from executeOrRunSubProcess import executeOrRun +from fileOperations import updateSizeAndChecksum +from archivematicaFunctions import escapeForCommand +import databaseInterface +LowerEndMainGroupMax = -10 + +commandObjects = {} +groupObjects = {} +commandLinkerObjects = {} + +def toStrFromUnicode(inputString, encoding='utf-8'): + """Converts to str, if it's unicode input type.""" + if isinstance(inputString, unicode): + inputString = inputString.encode('utf-8') + return inputString + + +class Command: + def __init__(self, commandID, replacementDic, onSuccess=None, opts=None): + self.pk = commandID + self.replacementDic = replacementDic + self.onSuccess = onSuccess + self.stdOut = "" + self.stdErr = "" + self.exitCode=None + self.failedCount=0 + self.opts = opts + sql = """SELECT CT.type, C.verificationCommand, 
C.eventDetailCommand, C.command, C.outputLocation, C.description + FROM Commands AS C + JOIN CommandTypes AS CT ON C.commandType = CT.pk + WHERE C.pk = """ + commandID.__str__() + """ + ;""" + c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + while row != None: + rowSTR = [] + for colIndex in range(len(row)): + rowSTR.append(toStrFromUnicode(row[colIndex])) + self.type, \ + self.verificationCommand, \ + self.eventDetailCommand, \ + self.command, \ + self.outputLocation, \ + self.description = \ + rowSTR + if isinstance(self.command, unicode): + self.command = self.command.encode('utf-8') + row = c.fetchone() + sqlLock.release() + if self.verificationCommand: + self.verificationCommand = Command(self.verificationCommand, replacementDic) + self.verificationCommand.command = self.verificationCommand.command.replace("%outputLocation%", self.outputLocation) + + if self.eventDetailCommand: + self.eventDetailCommand = Command(self.eventDetailCommand, replacementDic) + self.eventDetailCommand.command = self.eventDetailCommand.command.replace("%outputLocation%", self.outputLocation) + + def __str__(self): + if self.verificationCommand: + return "[COMMAND]\n" + \ + "PK: " + self.pk.__str__() + "\n" + \ + "Type: " + self.type.__str__() + "\n" + \ + "command: " + self.command.__str__() + "\n" + \ + "description: " + self.description.__str__() + "\n" + \ + "outputLocation: " + self.outputLocation.__str__() + "\n" + \ + "verificationCommand: " + self.verificationCommand.pk.__str__() + else: + return "[COMMAND]\n" + \ + "PK: " + self.pk.__str__() + "\n" + \ + "Type: " + self.type.__str__() + "\n" + \ + "command: " + self.command.__str__() + "\n" + \ + "description: " + self.description.__str__() + "\n" + \ + "outputLocation: " + self.outputLocation.__str__() + "\n" + \ + "verificationCommand: " + self.verificationCommand.__str__() + + def execute(self, skipOnSuccess=False): + #for each key replace all instances of the key in the command string + for key, value 
in self.replacementDic.iteritems(): + key = toStrFromUnicode(key) + self.replacementDic[key] = toStrFromUnicode(value) + #self.outputLocation = toStrFromUnicode(self.outputLocation) + #self.command = self.command.replace ( key, quote(replacementDic[key]) ) + self.command = self.command.replace( key, escapeForCommand(self.replacementDic[key]) ) + if self.outputLocation: + self.outputLocation = self.outputLocation.replace( key, self.replacementDic[key] ) + print "Running: " + selfstr = self.__str__() + print selfstr + if self.opts: + self.opts["prependStdOut"] += "\r\nRunning: \r\n%s" % (selfstr) + + self.exitCode, self.stdOut, self.stdError = executeOrRun(self.type, self.command) + + + if (not self.exitCode) and self.verificationCommand: + print + if self.opts: + self.opts["prependStdOut"] += "\r\n" + self.exitCode = self.verificationCommand.execute(skipOnSuccess=True) + + if (not self.exitCode) and self.eventDetailCommand: + self.eventDetailCommand.execute(skipOnSuccess=True) + + #If unsuccesful + if self.exitCode: + print >>sys.stderr, "Failed:" + #print >>sys.stderr, self.__str__() + print self.stdOut + print >>sys.stderr, self.stdError + if False and self.failedCount < 1: #retry count + self.failedCount= self.failedCount + 1 + time.sleep(2) + print >>sys.stderr, "retrying, ", self.failedCount + return self.execute(skipOnSuccess) + else: + if (not skipOnSuccess) and self.onSuccess: + self.onSuccess(self, self.opts, self.replacementDic) + return self.exitCode + +class CommandLinker: + def __init__(self, commandLinker, replacementDic, opts, onSuccess): + self.pk = commandLinker + self.replacementDic = replacementDic + self.opts = opts + self.onSuccess = onSuccess + sql = "SELECT command FROM CommandRelationships where pk = %s;" % (self.pk.__str__()) + rows = databaseInterface.queryAllSQL(sql) + if rows: + for row in rows: + self.command = row[0] + self.commandObject = Command(self.command.__str__(), replacementDic, self.onSuccess, opts) + + def __str__(self): + 
return "[Command Linker]\n" + \ + "PK: " + self.pk.__str__() + "\n" + \ + self.commandObject.__str__() + + def execute(self): + sql = "UPDATE CommandRelationships SET countAttempts=countAttempts+1 WHERE pk=" + self.pk.__str__() + ";" + databaseInterface.runSQL(sql) + ret = self.commandObject.execute() + if ret: + column = "countNotOK" + else: + column = "countOK" + sql = "UPDATE CommandRelationships SET " + column + "=" + column + "+1 WHERE pk=" + self.pk.__str__() + ";" + databaseInterface.runSQL(sql) + return ret + + diff --git a/src/MCPClient/lib/transcoderNormalizer.py b/src/MCPClient/lib/transcoderNormalizer.py new file mode 100755 index 0000000000..5d307c8ca0 --- /dev/null +++ b/src/MCPClient/lib/transcoderNormalizer.py @@ -0,0 +1,225 @@ +#!/usr/bin/python -OO +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. 
+ +# @package Archivematica +# @subpackage archivematicaClient +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ +import cPickle +import traceback +import archivematicaClient +import transcoder +import uuid +import os +import sys +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +from executeOrRunSubProcess import executeOrRun +import databaseInterface +from fileOperations import addFileToSIP +from fileOperations import updateSizeAndChecksum +from databaseFunctions import insertIntoEvents +from databaseFunctions import insertIntoDerivations + + +def executeCommandReleationship(gearman_worker, gearman_job): + try: + execute = gearman_job.task + print "executing:", execute, "{", gearman_job.unique, "}" + data = cPickle.loads(gearman_job.data) + utcDate = databaseInterface.getUTCDate() + opts = data["arguments"]#.encode("utf-8") + #if isinstance(arguments, unicode): + # arguments = arguments.encode("utf-8") + #if isinstance(arguments, str): + # arguments = unicode(arguments) + + sInput = "" + clientID = gearman_worker.worker_client_id + + opts["date"] = utcDate + opts["accessDirectory"] = os.path.join(opts['sipPath'], "DIP/objects") + "/" + opts["thumbnailDirectory"] = os.path.join(opts['sipPath'], "thumbnails") + "/" + print opts + for key, value in archivematicaClient.replacementDic.iteritems(): + for key2 in opts: + opts[key2] = opts[key2].replace(key, value) + replacementDic = getReplacementDic(opts) + #if True: + opts["prependStdOut"] = """Operating on file: {%s}%s \r\nUsing %s command classifications""" % (opts["fileUUID"], replacementDic["%fileName%"], opts["commandClassification"]) + opts["prependStdError"] = "" + # print clientID, execute, data + archivematicaClient.logTaskAssignedSQL(gearman_job.unique.__str__(), clientID, utcDate) + cl = transcoder.CommandLinker(opts["CommandRelationship"], replacementDic, opts, onceNormalized) + cl.execute() + + co = cl.commandObject + exitCode = co.exitCode + stdOut = "%s \r\n%s" % 
(opts["prependStdOut"], co.stdOut) + stdError = "%s \r\n%s" % (opts["prependStdError"], co.stdError) + + #TODO add date to ops + + + #Replace replacement strings + #archivematicaClient.printOutputLock.acquire() + #print >>sys.stderr, "<processingCommand>{" + gearman_job.unique + "}" + command.__str__() + "</processingCommand>" + #archivematicaClient.printOutputLock.release() + #exitCode, stdOut, stdError = executeOrRun("command", command, sInput, printing=False) + return cPickle.dumps({"exitCode" : exitCode, "stdOut": stdOut, "stdError": stdError}) + #catch OS errors + except OSError, ose: + archivematicaClient.printOutputLock.acquire() + traceback.print_exc(file=sys.stdout) + print >>sys.stderr, "Execution failed:", ose + archivematicaClient.printOutputLock.release() + output = ["Config Error!", ose.__str__() ] + exitCode = 1 + return cPickle.dumps({"exitCode" : exitCode, "stdOut": output[0], "stdError": output[1]}) + except: + archivematicaClient.printOutputLock.acquire() + traceback.print_exc(file=sys.stdout) + print sys.exc_info().__str__() + print "Unexpected error:", sys.exc_info()[0] + archivematicaClient.printOutputLock.release() + output = ["", sys.exc_info().__str__()] + return cPickle.dumps({"exitCode" : -1, "stdOut": output[0], "stdError": output[1]}) + + +def getReplacementDic(opts): + ret = {} + prefix = "" + postfix = "" + outputDirectory = "" + #get file name and extension + s = opts["inputFile"] + #get indexes for python string array + #index of next char after last / + x1 = s.rfind('/')+1 + #index of last . + x2 = s.rfind('.') + #index of next char after last . + x2mod = x2+1 + #length of s + sLen = len(s) + + if x2 < x1: + x2mod = 0 + + + fileDirectory = os.path.dirname(s) + "/" + if x2mod != 0: + fileExtension = s[x2mod:sLen] + fileTitle = s[x1:x2] + fileFullName = fileDirectory + fileTitle + "." + fileExtension + else: + #print "No file extension!" 
+ fileExtension = "" + fileTitle = s[x1:sLen] + fileFullName = fileDirectory + fileTitle + fileExtensionWithDot = "." + fileExtension + if fileExtension == "": + fileExtensionWithDot = "" + + sql = """SELECT CommandClassifications.classification FROM CommandRelationships JOIN CommandClassifications ON CommandRelationships.commandClassification = CommandClassifications.pk WHERE CommandRelationships.pk = %s;""" % (opts["CommandRelationship"]) + rows = databaseInterface.queryAllSQL(sql) + if rows: + for row in rows: + opts["commandClassification"] = row[0] + if row[0] == "preservation": + postfix = "-" + opts["taskUUID"] + outputFileUUID = opts["taskUUID"] + outputDirectory = fileDirectory + elif row[0] == "access": + prefix = opts["fileUUID"] + "-" + outputDirectory = opts["accessDirectory"] + elif row[0] == "thumbnail": + outputDirectory = opts["thumbnailDirectory"] + postfix = opts["fileUUID"] + else: + print >>sys.stderr, "Unsupported command classification.", opts["CommandRelationship"], row[0] + return ret + else: + print >>sys.stderr, "Unsupported None command classification.", opts["CommandRelationship"] + + + + + ret["%inputFile%"]= fileFullName + ret["%outputDirectory%"] = outputDirectory + ret["%fileExtension%"] = fileExtension + ret["%fileExtensionWithDot%"] = fileExtensionWithDot + ret["%fileFullName%"] = fileFullName + ret["%preservationFileDirectory%"] = fileDirectory + ret["%fileDirectory%"] = fileDirectory + ret["%fileTitle%"] = fileTitle + ret["%fileName%"] = fileTitle + ret["%prefix%"] = prefix + ret["%postfix%"] = postfix + ret["%outputFileUUID%"] = opts["taskUUID"] + return ret + + +def onceNormalized(command, opts, replacementDic): + transcodedFiles = [] + if not command.outputLocation: + command.outputLocation = "" + if os.path.isfile(command.outputLocation): + transcodedFiles.append(command.outputLocation) + elif os.path.isdir(command.outputLocation): + for w in os.walk(command.outputLocation): + path, directories, files = w + for p in files: + 
p = os.path.join(path, p) + if os.path.isfile(p): + transcodedFiles.append(p) + elif command.outputLocation: + print >>sys.stderr, command + print >>sys.stderr, "Error - output file does not exist [" + command.outputLocation + "]" + command.exitCode = -2 + + derivationEventUUID = uuid.uuid4().__str__() + eventDetail = "" + if command.eventDetailCommand != None: + eventDetail = eventDetail=command.eventDetailCommand.stdOut + for ef in transcodedFiles: + if opts["commandClassifications"] == "preservation": + #Add the new file to the sip + filePathRelativeToSIP = ef.replace(opts["sipPath"], "%SIPDirectory%", 1) + # addFileToSIP(filePathRelativeToSIP, fileUUID, sipUUID, taskUUID, date, sourceType="ingestion"): + addFileToSIP(filePathRelativeToSIP, replacementDic["%outputFileUUID%"], opts["sipUUID"], uuid.uuid4().__str__(), opts["date"], sourceType="creation", use="preservation") + #Calculate new file checksum + #Add event information to current file + insertIntoEvents(fileUUID=opts["fileUUID"], \ + eventIdentifierUUID=derivationEventUUID, \ + eventType="normalization", \ + eventDateTime=opts["date"], \ + eventDetail=eventDetail, \ + eventOutcome="", \ + eventOutcomeDetailNote=filePathRelativeToSIP) + + updateSizeAndChecksum(replacementDic["%outputFileUUID%"], ef, opts["date"], uuid.uuid4().__str__()) + + #Add linking information between files + insertIntoDerivations(sourceFileUUID=opts["fileUUID"], derivedFileUUID=replacementDic["%outputFileUUID%"], relatedEventUUID=derivationEventUUID) + + replacementDic["%outputFileUUID%"] = uuid.uuid4().__str__() + replacementDic["%postfix%"] = "-" + replacementDic["%outputFileUUID%"] + + + + diff --git a/src/MCPClient/lib/transcoderObjects/access.py b/src/MCPClient/lib/transcoderObjects/access.py new file mode 100755 index 0000000000..9d7ece731f --- /dev/null +++ b/src/MCPClient/lib/transcoderObjects/access.py @@ -0,0 +1,65 @@ +#!/usr/bin/python -OO +# This file is part of Archivematica. 
+# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. + +# @package Archivematica +# @subpackage archivematicaClient +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ + +class accessTranscoderObject: + def __init__(self, commandID): + fileTitle = "" + fileExtension = "" + fileDirectory = "" + fileFullName = "" + + def getReplacementDic(self): + return {} + + +def setFileIn(fileIn=sys.argv[1]): + global fileTitle + global fileExtension + global fileDirectory + global fileFullName + #get file name and extension + s = fileIn + #get indexes for python string array + #index of next char after last / + x1 = s.rfind('/')+1 + #index of last . + x2 = s.rfind('.') + #index of next char after last . + x2mod = x2+1 + #length of s + sLen = len(s) + + if x2 < x1: + x2mod = 0 + + + fileDirectory = os.path.dirname(s) + "/" + if x2mod != 0: + fileExtension = s[x2mod:sLen] + fileTitle = s[x1:x2] + fileFullName = fileDirectory + fileTitle + "." + fileExtension + else: + #print "No file extension!" 
+ fileExtension = "" + fileTitle = s[x1:sLen] + fileFullName = fileDirectory + fileTitle diff --git a/src/MCPServer/debian/archivematica-mcp-server.install b/src/MCPServer/debian/archivematica-mcp-server.install new file mode 100644 index 0000000000..a41cb6a7e1 --- /dev/null +++ b/src/MCPServer/debian/archivematica-mcp-server.install @@ -0,0 +1,6 @@ +etc/* /etc/archivematica/MCPServer/ +lib/* /usr/lib/archivematica/MCPServer/ +sharedDirectoryStructure/* /var/archivematica/sharedDirectory/ +init/* /etc/init/ +share/mysql /usr/share/dbconfig-common/data/archivematica-mcp-server/install/ +share/* /usr/share/archivematica/ diff --git a/src/MCPServer/debian/config b/src/MCPServer/debian/config new file mode 100644 index 0000000000..6f44545eaa --- /dev/null +++ b/src/MCPServer/debian/config @@ -0,0 +1,15 @@ +#!/bin/sh +# config maintainer script for archivematica-mcp-server + +# source debconf stuff +. /usr/share/debconf/confmodule + +dbc_dbname=MCP +dbc_dbuser=demo +dbc_dbpass=demo + +# source dbconfig-common shell library, and call the hook function +if [ -f /usr/share/dbconfig-common/dpkg/config.mysql ]; then + . 
/usr/share/dbconfig-common/dpkg/config.mysql + dbc_go archivematica-mcp-server $@ +fi diff --git a/src/MCPServer/debian/control new file mode 100644 index 0000000000..fe77686388 --- /dev/null +++ b/src/MCPServer/debian/control @@ -0,0 +1,14 @@ +Source: archivematica-mcp-server +Section: utils +Priority: extra +Maintainer: Austin Trask <austin@artefactual.com> +Build-Depends: debhelper (>= 7) +Standards-Version: 3.8.3 +Homepage: http://archivematica.org + +Package: archivematica-mcp-server +Architecture: any +Depends: ${shlibs:Depends}, ${misc:Depends}, dbconfig-common, logapp, mysql-server, python-pyinotify, python-gearman, python-mysqldb, python-lxml, gearman, uuid, archivematica-common +Description: MCP Server for Archivematica + The MCP (master control program) server coordinates Archivematica's digital preservation workflows, watching shared directories, scheduling jobs, and distributing processing tasks to MCP clients via Gearman. + diff --git a/src/MCPServer/debian/copyright new file mode 100644 index 0000000000..3906e99c09 --- /dev/null +++ b/src/MCPServer/debian/copyright @@ -0,0 +1,37 @@ +This work was packaged for Ubuntu by: + + Austin Trask <austin@artefactual.com> + +It was downloaded from http://archivematica.org + +Upstream Author(s): + + Joseph Perry <joseph@artefactual.com> + Jesus Garcia Crespo <jesus@artefactual.com> + Austin Trask <austin@artefactual.com> + Peter Van Garderen <peter@artefactual.com> + Evelyn McLellan <evelyn@artefactual.com> + +Copyright: + + Copyright (C) 2010-2012 Artefactual Systems Inc. <http://artefactual.com> + +License: + + This is free software: you can redistribute it and/or modify + it under the terms of the GNU Affero General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This software is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. 
+ + You should have received a copy of the GNU General Public License + along with this software. If not, see <http://www.gnu.org/licenses/>. + + +The Debian packaging is: + + Copyright (C) 2010-2012 Artefactual Systems Inc. <http://artefactual.com> diff --git a/src/MCPServer/debian/postinst b/src/MCPServer/debian/postinst new file mode 100755 index 0000000000..197227e4fa --- /dev/null +++ b/src/MCPServer/debian/postinst @@ -0,0 +1,23 @@ +#!/bin/sh + +# source debconf stuff +. /usr/share/debconf/confmodule +. /usr/share/dbconfig-common/dpkg/postinst + +dbc_go archivematica-mcp-server $@ + +userID=`id -u archivematica` + +if [ "${userID}" = 333 ]; then + echo "User archivematica exists" +else + adduser --uid 333 --group --system --home /var/lib/archivematica/ archivematica +fi + +sudo usermod -a -G audio archivematica +chown -R archivematica:archivematica "/var/archivematica/" +chmod -R g+s "/var/archivematica/" +chmod -R 775 "/var/archivematica/" +echo "archivematica ALL=NOPASSWD:/bin/mv,/bin/chown,/bin/chmod,/usr/bin/gs,/usr/lib/transcoder/transcoderScripts/DocumentConverter.py,/usr/bin/inkscape,/usr/lib/archivematica/transcoder/transcoderScripts/restartOpenOffice.sh" >> /etc/sudoers +#chown archivematica:archivematica /etc/archivematica/MCPServer/dbsettings +#chmod 700 /etc/archivematica/MCPServer/dbsettings diff --git a/src/MCPServer/debian/rules b/src/MCPServer/debian/rules new file mode 100755 index 0000000000..917d9bf25d --- /dev/null +++ b/src/MCPServer/debian/rules @@ -0,0 +1,13 @@ +#!/usr/bin/make -f +# -*- makefile -*- +# Sample debian/rules that uses debhelper. +# This file was originally written by Joey Hess and Craig Small. +# As a special exception, when this file is copied by dh-make into a +# dh-make output file, you may use that output file without restriction. +# This special exception was added by Craig Small in version 0.37 of dh-make. + +# Uncomment this to turn on verbose mode. 
+#export DH_VERBOSE=1 + +%: + dh $@ diff --git a/src/MCPServer/etc/serverConfig.conf new file mode 100755 index 0000000000..f7fc0fff4f --- /dev/null +++ b/src/MCPServer/etc/serverConfig.conf @@ -0,0 +1,90 @@ +# Archivematica MCP Server configuration (INI syntax; read by Python ConfigParser) + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. 
+ + +# @package Archivematica +# @subpackage MCPServer +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ + +[MCPServer] +AIPCompressionAlgorithm = lzma +AIPCompressionLevel = 1 +checksumsNoExtention = checksum +fileUUIDSHumanReadable = FileUUIDs.log +moduleConfigDir = /etc/archivematica/MCPServer/mcpModulesConfig +#to listen on localhost only, uncomment the following line +MCPArchivematicaServer = localhost:4730 +GearmanServerWorker = localhost:4730 +archivematicaProtocol = /etc/archivematica/MCPServer/archivematicaProtocol +watchDirectoryPath = /var/archivematica/sharedDirectory/watchedDirectories/ +sharedDirectory = /var/archivematica/sharedDirectory/ +processingDirectory = /var/archivematica/sharedDirectory/currentlyProcessing/ +rejectedDirectory = %%sharedPath%%rejected/ +watchDirectoriesPollInterval = 1 +MCPWaitForCopyToCompleteSeconds = 1 +actOnCopied = true +forceNoApprovalRequiredOnAllJobs = false +limitGearmanConnections = 30000 +processingXMLFile = processingMCP.xml +waitOnAutoApprove = 0 + +#transferD +##delayTimer = number of seconds to wait before marking a file as removed, after it is noted as having been moved, and not claimed by a movedTo event. 
+delayTimer = 3 +##waitToActOnMoves = duration to wait for microservices to update the location of the SIP/transfer in the db +waitToActOnMoves = 1 +singleInstancePIDFile = /tmp/archivematicaMCPServerPID + + +[Protocol] +#separates values when transported from client to server +delimiter = <!&\delimiter/&!> +#seconds to pause between sending keep-alives +keepAlivePause = 600 +maxLen = 1000000 + +#--TO CLIENT-- +#To load server configs into the client --NOT USED/NEEDED (everything can happen on server side) +addToServerConf = addToServerConf +#to tell the client to perform a task +performTask = performTask +#keeps idle connection open +keepAlive = keepAlive + +#--TO SERVER-- +#inform the server the client is capable of running a certain type of task +addToListTaskHandler = addToListTaskHandler +#inform the server a task is completed +taskCompleted = taskCompleted +#tell the server how many threads this client will run +maxTasks = setMaxTasks +setName = setName +requestLockForWrite = requestLockForWrite + +#--Gearman-- +limitGearmanConnections = 10000 +limitTaskThreads = 75 +limitTaskThreadsSleep = 0.2 +reservedAsTaskProcessingThreads = 8 + + + + + diff --git a/src/MCPServer/init.d/archivematica-mcp-serverd new file mode 100755 index 0000000000..8b5e551823 --- /dev/null +++ b/src/MCPServer/init.d/archivematica-mcp-serverd @@ -0,0 +1,85 @@ +#!/bin/bash + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. + +# @package Archivematica +# @subpackage Ingest +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ + +#source /etc/archivematica/archivematicaConfig.conf +set -e +pidFile="/tmp/archivematicaMCPServer.pid" +logFile="/var/archivematica/sharedDirectory/archivematicaMCPServer.log" + + +startArchivematica() { + echo "Starting" + tmpDir="`pwd`" + cd /usr/lib/archivematica/MCPServer/ + sudo -u archivematica twistd -y /usr/lib/archivematica/MCPServer/archivematicaMCP.py --pidfile "$pidFile" -l "$logFile" #--gid archivematica + sudo -u archivematica chmod 755 "$pidFile" + sudo -u archivematica chmod 755 "$logFile" + echo PID: `cat "$pidFile"` + cd "$tmpDir" + echo "Started" +} + +stopArchivematica() { + echo "Stopping" + "$0" status && kill `cat "$pidFile"` + echo "Stopped" +} + +case "$1" in + start) + startArchivematica + ;; + stop) + stopArchivematica + ;; + + reload|force-reload) + stopArchivematica + startArchivematica + ;; + + restart) + stopArchivematica + sleep 1 + startArchivematica + ;; + + status) + #status_of_proc -p "$pidFile" /usr/bin/python /usr/bin/twistd && exit 0 || exit $? + if [ -n "`cat "$pidFile" 2>/dev/null`" ]; then + ps `cat "$pidFile"` + exit 0 + else + echo "Not running" + exit 1 + fi + + ;; + + *) + echo "usage [start | stop | restart ]" + exit 1 +esac + +exit 0 diff --git a/src/MCPServer/init/archivematica-mcp-server.conf b/src/MCPServer/init/archivematica-mcp-server.conf new file mode 100644 index 0000000000..15a570d327 --- /dev/null +++ b/src/MCPServer/init/archivematica-mcp-server.conf @@ -0,0 +1,72 @@ +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. 
<http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. + +description "Archivematica MCP Server" +author "Austin Trask <austin@artefactual.com>, Joseph Perry <joseph@artefactual.com>" + +start on (started mysql) +stop on runlevel [016] + +console output + +env CONF=/etc/archivematica/MCPServer +env LOCATION=/usr/lib/archivematica/MCPServer/archivematicaMCP.py + +setuid archivematica +setgid archivematica + +pre-start script + + # Check that $CONF directory exists + [ -d $CONF ] + + # Wait for Gearman service + while [ ! -f /var/run/gearman/gearmand.pid ] + do + sleep 3 + done + +end script + +script + + # Build LOGFILE path + DATE=`date` + HOSTNAME=`hostname` + LOGFILE=/tmp/archivematicaMCPServer-${HOSTNAME}-${DATE}.log + + # Run + $LOCATION 1>>$LOGFILE 2>>$LOGFILE + + # Logapp + # LOGTIME=true + # APPENDLOG=true + # CIRCULARLOG=true + # MAXLOGSIZE=10000 # Max 4000000 + # logapp --logtime=$LOGTIME \ + # --maxlogsize=$MAXLOGSIZE \ + # --logfile="$LOGFILE" \ + # --appendlog=$APPENDLOG \ + # --circularlog=$CIRCULARLOG \ + # $LOCATION + +end script + + + + + diff --git a/src/MCPServer/lib/RPCServer.py b/src/MCPServer/lib/RPCServer.py new file mode 100755 index 0000000000..402a6c8017 --- /dev/null +++ b/src/MCPServer/lib/RPCServer.py @@ -0,0 +1,113 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. 
+# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. + +# @package Archivematica +# @subpackage MCPServer +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ +import archivematicaMCP +import sys +from linkTaskManagerChoice import choicesAvailableForUnits +import lxml.etree as etree +import gearman +import cPickle +import time +import traceback +from socket import gethostname +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +import databaseInterface + +def rpcError(code="", details=""): + ret = etree.Element("Error") + etree.SubElement(ret, "code").text = code.__str__() + etree.SubElement(ret, "details").text = details.__str__() + return ret + +def verifyDatabaseIsNotLocked(): + timeBeforeReturningErrorLockedDB = 4 + timeToSleep = 0.1 + numberOfRuns = 0 #count of number of runs in loop + while not databaseInterface.sqlLock.acquire(False): + time.sleep(timeToSleep) + numberOfRuns += 1 + if numberOfRuns * timeToSleep > timeBeforeReturningErrorLockedDB: + return rpcError(code="DatabaseLock", details="Couldn't acquire database lock") + databaseInterface.sqlLock.release() + return None + +def getJobsAwaitingApproval(): + ret = etree.Element("choicesAvailableForUnits") + dbStatus = verifyDatabaseIsNotLocked() + if dbStatus: + #print etree.tostring(dbStatus) + return 
etree.tostring(dbStatus) + for UUID, choice in choicesAvailableForUnits.items(): + ret.append(choice.xmlify()) + return etree.tostring(ret, pretty_print=True) + + +def approveJob(jobUUID, chain): + print "approving: ", jobUUID, chain + if jobUUID in choicesAvailableForUnits: + choicesAvailableForUnits[jobUUID].proceedWithChoice(chain) + return "approving: ", jobUUID, chain + +def gearmanApproveJob(gearman_worker, gearman_job): + try: + #execute = gearman_job.task + data = cPickle.loads(gearman_job.data) + jobUUID = data["jobUUID"] + chain = data["chain"] + ret = cPickle.dumps(approveJob(jobUUID, chain)) + if not ret: + ret = "" + return "" + #catch OS errors + except Exception as inst: + print >>sys.stderr, "DEBUG EXCEPTION! gearmanApproveJob" + traceback.print_exc(file=sys.stdout) + print >>sys.stderr, type(inst) # the exception instance + print >>sys.stderr, inst.args + return "" + +def gearmanGetJobsAwaitingApproval(gearman_worker, gearman_job): + try: + #print "DEBUG - getting list of jobs" + #execute = gearman_job.task + ret = cPickle.dumps(getJobsAwaitingApproval()) + #print ret + if not ret: + ret = "" + return ret + #catch OS errors + except Exception as inst: + print >>sys.stderr, "DEBUG EXCEPTION! 
gearmanGetJobsAwaitingApproval" + traceback.print_exc(file=sys.stdout) + print >>sys.stderr, type(inst) # the exception instance + print >>sys.stderr, inst.args + return "" + + +def startRPCServer(): + gm_worker = gearman.GearmanWorker([archivematicaMCP.config.get('MCPServer', 'GearmanServerWorker')]) + hostID = gethostname() + "_MCPServer" + gm_worker.set_client_id(hostID) + gm_worker.register_task("approveJob", gearmanApproveJob) + gm_worker.register_task("getJobsAwaitingApproval", gearmanGetJobsAwaitingApproval) + gm_worker.work() diff --git a/src/MCPServer/lib/archivematicaMCP.py b/src/MCPServer/lib/archivematicaMCP.py new file mode 100755 index 0000000000..a96c7ab413 --- /dev/null +++ b/src/MCPServer/lib/archivematicaMCP.py @@ -0,0 +1,333 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. + +# @package Archivematica +# @subpackage MCPServer +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ + +#~DOC~ +# +# --- This is the MCP (master control program) --- +# The intention of this program is to provide a centralized automated distributed system for performing an arbitrary set of tasks on a directory. +# Distributed in that the work can be performed on more than one physical computer simultaneously. 
+# Centralized in that there is one centre point for configuring flow through the system. +# Automated in that the tasks performed will be based on the config files and instantiated for each of the targets. +# +# It loads configurations from the database. +# +import threading +import watchDirectory +from jobChain import jobChain +from unitSIP import unitSIP +from unitDIP import unitDIP +from unitFile import unitFile +from unitTransfer import unitTransfer +from pyinotify import ThreadedNotifier +import transferD +import RPCServer +import MySQLdb + +import signal +import os +import pyinotify +# from archivematicaReplacementDics import replacementDics +# from MCPlogging import * +# from MCPloggingSQL import getUTCDate +import ConfigParser +# from mcpModules.modules import modulesClass +import uuid +import string +import math +import copy +import time +import subprocess +import shlex +import sys +import lxml.etree as etree +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +import databaseInterface +import databaseFunctions +import multiprocessing +import traceback +from externals.singleInstance import singleinstance +from archivematicaFunctions import unicodeToStr + +global countOfCreateUnitAndJobChainThreaded +countOfCreateUnitAndJobChainThreaded = 0 + +config = ConfigParser.SafeConfigParser({'MCPArchivematicaServerInterface': ""}) +config.read("/etc/archivematica/MCPServer/serverConfig.conf") + +# archivematicaRD = replacementDics(config) + +#time to sleep to allow db to be updated with the new location of a SIP +dbWaitSleep = 2 +transferDMovedFromCounter = multiprocessing.Value('i', 0) + +configs = [] +jobsAwaitingApproval = [] +jobsQueue = [] #jobs shouldn't remain here long (a few seconds max) before they are turned into tasks (jobs being processed) +jobsBeingProcessed = [] +tasksQueue = [] +tasksBeingProcessed = [] +tasksLock = threading.Lock() +movingDirectoryLock = threading.Lock() +jobsLock = threading.Lock() +watchedDirectories = [] 
+limitTaskThreads = config.getint('Protocol', "limitTaskThreads") +limitTaskThreadsSleep = config.getfloat('Protocol', "limitTaskThreadsSleep") +limitGearmanConnectionsSemaphore = threading.Semaphore(value=config.getint('Protocol', "limitGearmanConnections")) +reservedAsTaskProcessingThreads = config.getint('Protocol', "reservedAsTaskProcessingThreads") +debug = False +stopSignalReceived = False + +def isUUID(uuid): + split = uuid.split("-") + if len(split) != 5 \ + or len(split[0]) != 8 \ + or len(split[1]) != 4 \ + or len(split[2]) != 4 \ + or len(split[3]) != 4 \ + or len(split[4]) != 12 : + return False + return True + +def findOrCreateSipInDB(path, waitSleep=dbWaitSleep): + UUID = "" + path = path.replace(config.get('MCPServer', "sharedDirectory"), "%sharedPath%", 1) + + #find UUID on end of SIP path + uuidLen = -36 + if isUUID(path[uuidLen-1:-1]): + UUID = path[uuidLen-1:-1] + + + if UUID == "": + #Find it in the database + sql = """SELECT sipUUID FROM SIPs WHERE currentPath = '""" + MySQLdb.escape_string(path) + "';" + #if waitSleep != 0: + #time.sleep(waitSleep) #let db be updated by the microservice that moved it. 
+ c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + if not row: + print "Not opening existing SIP:", UUID, "-", path + while row != None: + UUID = row[0] + print "Opening existing SIP:", UUID, "-", path + row = c.fetchone() + sqlLock.release() + + + #Create it + if UUID == "": + UUID = databaseFunctions.createSIP(path) + print "DEBUG creating sip", path, UUID + return UUID + +def createUnitAndJobChain(path, config, terminate=False): + path = unicodeToStr(path) + if os.path.isdir(path): + path = path + "/" + print "createUnitAndJobChain", path, config + unit = None + if os.path.isdir(path): + if config[3] == "SIP": + UUID = findOrCreateSipInDB(path) + unit = unitSIP(path, UUID) + elif config[3] == "DIP": + UUID = findOrCreateSipInDB(path) + unit = unitDIP(path, UUID) + elif config[3] == "Transfer": + #UUID = findOrCreateSipInDB(path) + unit = unitTransfer(path) + elif os.path.isfile(path): + if config[3] == "Transfer": + unit = unitTransfer(path) + else: + return + UUID = uuid.uuid4() + unit = unitFile(path, UUID) + else: + return + jobChain(unit, config[1]) + if terminate: + exit(0) + +def createUnitAndJobChainThreaded(path, config, terminate=True): + global countOfCreateUnitAndJobChainThreaded + #createUnitAndJobChain(path, config) + #return + try: + if debug: + print "DEBGUG alert watch path: ", path + t = threading.Thread(target=createUnitAndJobChain, args=(path, config), kwargs={"terminate":terminate}) + t.daemon = True + countOfCreateUnitAndJobChainThreaded += 1 + while(limitTaskThreads <= threading.activeCount() + reservedAsTaskProcessingThreads ): + if stopSignalReceived: + print "Signal was received; stopping createUnitAndJobChainThreaded(path, config)" + exit(0) + print threading.activeCount().__str__() + #print "DEBUG createUnitAndJobChainThreaded waiting on thread count", threading.activeCount() + time.sleep(.5) + countOfCreateUnitAndJobChainThreaded -= 1 + t.start() + except Exception as inst: + print "DEBUG EXCEPTION!" 
+ traceback.print_exc(file=sys.stdout) + print type(inst) # the exception instance + print inst.args + +def watchDirectories(): + rows = [] + sql = """SELECT watchedDirectoryPath, chain, onlyActOnDirectories, description FROM WatchedDirectories LEFT OUTER JOIN WatchedDirectoriesExpectedTypes ON WatchedDirectories.expectedType = WatchedDirectoriesExpectedTypes.pk""" + c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + while row != None: + rows.append(row) + row = c.fetchone() + sqlLock.release() + + for row in rows: + directory = row[0].replace("%watchDirectoryPath%", config.get('MCPServer', "watchDirectoryPath"), 1) + if not os.path.isdir(directory): + os.makedirs(directory) + for item in os.listdir(directory): + if item == ".svn": + continue + item = item.decode("utf-8") + path = os.path.join(unicode(directory), item) + #createUnitAndJobChain(path, row) + while(limitTaskThreads <= threading.activeCount() + reservedAsTaskProcessingThreads ): + time.sleep(1) + createUnitAndJobChainThreaded(path, row, terminate=False) + actOnFiles=True + if row[2]: #onlyActOnDirectories + actOnFiles=False + watchDirectory.archivematicaWatchDirectory(directory,variablesAdded=row, callBackFunctionAdded=createUnitAndJobChainThreaded, alertOnFiles=actOnFiles, interval=config.getint('MCPServer', "watchDirectoriesPollInterval")) + +#if __name__ == '__main__': +# signal.signal(signal.SIGTERM, signal_handler) +# signal.signal(signal.SIGINT, signal_handler) + +#configs = loadConfigs() +#directoryWatchList = loadDirectoryWatchLlist(configs) +#archivematicaMCPServerListen() + + +def signal_handler(signalReceived, frame): + print signalReceived, frame + global stopSignalReceived + stopSignalReceived = True + threads = threading.enumerate() + for thread in threads: + if False and isinstance(thread, threading.Thread): + try: + print "not stopping: ", type(thread), thread + except Exception as inst: + print "DEBUG EXCEPTION!" 
+ print type(inst) # the exception instance + print inst.args + elif isinstance(thread, pyinotify.ThreadedNotifier): + print "stopping: ", type(thread), thread + try: + thread.stop() + except Exception as inst: + print >>sys.stderr, "DEBUG EXCEPTION!" + print >>sys.stderr, type(inst) # the exception instance + print >>sys.stderr, inst.args + else: + print "not stopping: ", type(thread), thread + sys.stdout.flush() + sys.stderr.flush() + sys.exit(0) + exit(0) + +def debugMonitor(): + global countOfCreateUnitAndJobChainThreaded + while True: + dblockstatus = "SQL Lock: Locked" + if databaseInterface.sqlLock.acquire(False): + databaseInterface.sqlLock.release() + dblockstatus = "SQL Lock: Unlocked" + print "<DEBUG type=\"archivematicaMCP\">", "\tDate Time: ", databaseInterface.getUTCDate(), "\tThreadCount: ", threading.activeCount(), "\tcountOfCreateUnitAndJobChainThreaded", countOfCreateUnitAndJobChainThreaded, dblockstatus, "</DEBUG>" + time.sleep(60) + +def flushOutputs(): + while True: + sys.stdout.flush() + sys.stderr.flush() + time.sleep(5) + +def startTransferD(): + p = multiprocessing.Process(target=transferD.mainWithMovedFromCounter, args=(transferDMovedFromCounter,)) + p.start() + print >>sys.stderr, "transferD started - PID:", p.pid + while p.is_alive(): + time.sleep(5) + print >>sys.stderr, "transferD crashed\n exitCode:", p.exitcode + + +def cleanupOldDbEntriesOnNewRun(): + sql = """DELETE FROM Jobs WHERE Jobs.currentStep = 'Awaiting decision';""" + databaseInterface.runSQL(sql) + + sql = """UPDATE Jobs SET currentStep='Failed' WHERE currentStep='Executing command(s)';""" + databaseInterface.runSQL(sql) + + sql = """UPDATE Tasks SET exitCode=-1, stdError='MCP shut down while processing.' 
WHERE exitCode IS NULL;""" + databaseInterface.runSQL(sql) + + + +if __name__ == '__main__': + signal.signal(signal.SIGINT, signal_handler) + signal.signal(signal.SIGTERM, signal_handler) + si = singleinstance(config.get('MCPServer', "singleInstancePIDFile")) + if si.alreadyrunning(): + print >>sys.stderr, "Another instance is already running. Killing PID:", si.pid + si.kill() + elif False: #testing single instance stuff + while 1: + print "psudo run" + time.sleep(3) + print "This PID: ", si.pid + + if True: + import getpass + print "user: ", getpass.getuser() + os.setuid(333) + if False: + t = threading.Thread(target=debugMonitor) + t.daemon = True + t.start() + if True: + t = threading.Thread(target=flushOutputs) + t.daemon = True + t.start() + + cleanupOldDbEntriesOnNewRun() + watchDirectories() + #t = threading.Thread(target=startTransferD) + #t.daemon = True + #t.start() + + # This is blocking the main thread with the worker loop + RPCServer.startRPCServer() diff --git a/src/MCPServer/lib/jobChain.py b/src/MCPServer/lib/jobChain.py new file mode 100755 index 0000000000..9ca09e6975 --- /dev/null +++ b/src/MCPServer/lib/jobChain.py @@ -0,0 +1,86 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. 
+ +# @package Archivematica +# @subpackage MCPServer +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ + +import sys +import threading +from jobChainLink import jobChainLink +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +import databaseInterface +#Holds: +#-UNIT +#-Job chain link +#-Job chain description +# +#potentialToHold/getFromDB +#-previous chain links +class jobChain: + def __init__(self, unit, chainPK, notifyComplete=None, passVar=None, UUID=None, subJobOf=""): + print "jobChain", unit, chainPK + if chainPK == None: + return None + self.unit = unit + self.pk = chainPK + self.notifyComplete = notifyComplete + self.UUID = UUID + self.linkSplitCount = 1 + self.subJobOf = subJobOf + sql = """SELECT * FROM MicroServiceChains WHERE pk = """ + chainPK.__str__() + print sql + c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + if row == None: + sqlLock.release() + return None + while row != None: + print "jobChain", row + #self.pk = row[0] + self.startingChainLink = row[1] + self.description = row[2] + row = c.fetchone() + sqlLock.release() + self.currentLink = jobChainLink(self, self.startingChainLink, unit, passVar=passVar, subJobOf=subJobOf) + if self.currentLink == None: + return None + + def nextChainLink(self, pk, passVar=None, incrementLinkSplit=False, subJobOf=""): + if self.subJobOf and not subJobOf: + subJobOf = self.subJobOf + if incrementLinkSplit: + self.linkSplitCount += 1 + if pk != None: + # may 2012 - can't think why I'm threading this - TODO + # I think it was threaded to avoid nasty stack trace problems + #t = threading.Thread(target=self.nextChainLinkThreaded, args=(pk,), kwargs={"passVar":passVar} ) + #t.daemon = True + #t.start() + jobChainLink(self, pk, self.unit, passVar=passVar, subJobOf=subJobOf) + else: + self.linkSplitCount -= 1 + if self.linkSplitCount == 0: + print "Done with UNIT:" + self.unit.UUID + if self.notifyComplete: + self.notifyComplete(self) + + def 
nextChainLinkThreaded(self, pk, passVar=None): + self.currentLink = jobChainLink(self, pk, self.unit, passVar) diff --git a/src/MCPServer/lib/jobChainLink.py b/src/MCPServer/lib/jobChainLink.py new file mode 100755 index 0000000000..8df20cb420 --- /dev/null +++ b/src/MCPServer/lib/jobChainLink.py @@ -0,0 +1,183 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. 
+ +# @package Archivematica +# @subpackage MCPServer +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ +import sys +import uuid +import MySQLdb +from linkTaskManagerDirectories import linkTaskManagerDirectories +from linkTaskManagerFiles import linkTaskManagerFiles +from linkTaskManagerChoice import linkTaskManagerChoice +from linkTaskManagerAssignMagicLink import linkTaskManagerAssignMagicLink +from linkTaskManagerLoadMagicLink import linkTaskManagerLoadMagicLink +from linkTaskManagerReplacementDicFromChoice import linkTaskManagerReplacementDicFromChoice +from linkTaskManagerSplit import linkTaskManagerSplit +from linkTaskManagerSplitOnFileIdAndruleset import linkTaskManagerSplitOnFileIdAndruleset +from linkTaskManagerTranscoderCommand import linkTaskManagerTranscoderCommand +from linkTaskManagerGetMicroserviceGeneratedListInStdOut import linkTaskManagerGetMicroserviceGeneratedListInStdOut +from linkTaskManagerGetUserChoiceFromMicroserviceGeneratedList import linkTaskManagerGetUserChoiceFromMicroserviceGeneratedList +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +import databaseInterface +from databaseFunctions import logJobCreatedSQL +from playAudioFileInCVLC import playAudioFileInThread + +#Constants +# SELECT * FROM TaskTypes; +constOneTask = 0 +constTaskForEachFile = 1 +constSelectPathTask = 2 +constSetMagicLink = 3 +constLoadMagicLink = 4 +constGetReplacementDic = 5 +constSplitByFile = 6 +constlinkTaskManagerSplitOnFileIdAndruleset = 7 +constTranscoderTaskLink = 8 +constlinkTaskManagerGetMicroserviceGeneratedListInStdOut = 9 +constlinkTaskManagerGetUserChoiceFromMicroserviceGeneratedList = 10 + +class jobChainLink: + def __init__(self, jobChain, jobChainLinkPK, unit, passVar=None, subJobOf=""): + if jobChainLinkPK == None: + return None + self.UUID = uuid.uuid4().__str__() + self.jobChain = jobChain + self.pk = jobChainLinkPK + self.unit = unit + self.passVar=passVar + self.createdDate = databaseInterface.getUTCDate() + 
self.subJobOf = subJobOf + sql = """SELECT MicroServiceChainLinks.currentTask, MicroServiceChainLinks.defaultNextChainLink, TasksConfigs.taskType, TasksConfigs.taskTypePKReference, TasksConfigs.description, MicroServiceChainLinks.reloadFileList, Sounds.fileLocation, MicroServiceChainLinks.defaultExitMessage, MicroServiceChainLinks.microserviceGroup FROM MicroServiceChainLinks LEFT OUTER JOIN Sounds ON MicroServiceChainLinks.defaultPlaySound = Sounds.pk JOIN TasksConfigs on MicroServiceChainLinks.currentTask = TasksConfigs.pk WHERE MicroServiceChainLinks.pk = """ + jobChainLinkPK.__str__() + c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + if row == None: + sqlLock.release() + return None + while row != None: + print row + self.currentTask = row[0] + self.defaultNextChainLink = row[1] + taskType = row[2] + taskTypePKReference = row[3] + self.description = row[4] + self.reloadFileList = row[5] + self.defaultSoundFile = row[6] + self.defaultExitMessage = row[7] + self.microserviceGroup = row[8] + row = c.fetchone() + sqlLock.release() + + + + print "<<<<<<<<< ", self.description, " >>>>>>>>>" + self.unit.reload() + + logJobCreatedSQL(self) + + if self.createTasks(taskType, taskTypePKReference) == None: + self.getNextChainLinkPK(None) + #can't have none represent end of chain, and no tasks to process. + #could return negative? 
+ + def createTasks(self, taskType, taskTypePKReference): + if taskType == constOneTask: + linkTaskManagerDirectories(self, taskTypePKReference, self.unit) + elif taskType == constTaskForEachFile: + if self.reloadFileList: + self.unit.reloadFileList(); + linkTaskManagerFiles(self, taskTypePKReference, self.unit) + elif taskType == constSelectPathTask: + linkTaskManagerChoice(self, taskTypePKReference, self.unit) + elif taskType == constSetMagicLink: + linkTaskManagerAssignMagicLink(self, taskTypePKReference, self.unit) + elif taskType == constLoadMagicLink: + linkTaskManagerLoadMagicLink(self, taskTypePKReference, self.unit) + elif taskType == constGetReplacementDic: + linkTaskManagerReplacementDicFromChoice(self, taskTypePKReference, self.unit) + elif taskType == constSplitByFile: + if self.reloadFileList: + self.unit.reloadFileList(); + linkTaskManagerSplit(self, taskTypePKReference, self.unit) + elif taskType == constlinkTaskManagerSplitOnFileIdAndruleset: + if self.reloadFileList: + self.unit.reloadFileList(); + linkTaskManagerSplitOnFileIdAndruleset(self, taskTypePKReference, self.unit) + elif taskType == constTranscoderTaskLink: + if self.reloadFileList: + self.unit.reloadFileList(); + linkTaskManagerTranscoderCommand(self, taskTypePKReference, self.unit) + elif taskType == constlinkTaskManagerGetMicroserviceGeneratedListInStdOut: + linkTaskManagerGetMicroserviceGeneratedListInStdOut(self, taskTypePKReference, self.unit) + elif taskType == constlinkTaskManagerGetUserChoiceFromMicroserviceGeneratedList: + linkTaskManagerGetUserChoiceFromMicroserviceGeneratedList(self, taskTypePKReference, self.unit) + else: + print sys.stderr, "unsupported task type: ", taskType + + def getSoundFileToPlay(self, exitCode): + if exitCode != None: + ret = self.defaultSoundFile + sql = "SELECT Sounds.fileLocation FROM MicroServiceChainLinksExitCodes LEFT OUTER JOIN Sounds ON MicroServiceChainLinksExitCodes.playSound = Sounds.pk WHERE microServiceChainLink = %s AND exitCode = %s" % 
(self.pk.__str__(), exitCode.__str__()) + c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + if row != None: + ret = row[0] + sqlLock.release() + return ret + + def getNextChainLinkPK(self, exitCode): + if exitCode != None: + ret = self.defaultNextChainLink + sql = "SELECT nextMicroServiceChainLink FROM MicroServiceChainLinksExitCodes WHERE microServiceChainLink = %s AND exitCode = %s" % (self.pk.__str__(), exitCode.__str__()) + c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + if row != None: + ret = row[0] + sqlLock.release() + return ret + + def setExitMessage(self, message): + databaseInterface.runSQL("UPDATE Jobs " + \ + "SET currentStep='" + MySQLdb.escape_string(message.__str__()) + "' " + \ + "WHERE jobUUID='" + self.UUID + "'" ) + + def updateExitMessage(self, exitCode): + ret = self.defaultExitMessage + if exitCode != None: + sql = "SELECT exitMessage FROM MicroServiceChainLinksExitCodes WHERE microServiceChainLink = %s AND exitCode = %s" % (self.pk.__str__(), exitCode.__str__()) + c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + if row != None: + ret = row[0] + sqlLock.release() + if ret != None: + self.setExitMessage(ret) + else: + print "No exit message" + + def linkProcessingComplete(self, exitCode, passVar=None): + playSounds = True + if playSounds: + filePath = self.getSoundFileToPlay(exitCode) + if filePath: + print "playing: ", filePath + playAudioFileInThread(filePath) + self.updateExitMessage(exitCode) + self.jobChain.nextChainLink(self.getNextChainLinkPK(exitCode), passVar=passVar) diff --git a/src/MCPServer/lib/linkTaskManager.py b/src/MCPServer/lib/linkTaskManager.py new file mode 100755 index 0000000000..a3f785b0c1 --- /dev/null +++ b/src/MCPServer/lib/linkTaskManager.py @@ -0,0 +1,28 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. 
<http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. + +# @package Archivematica +# @subpackage MCPServer +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ + +class linkTaskManager: + def __init__(self, pk, unit): + self.tasks = [] + self.pk = pk diff --git a/src/MCPServer/lib/linkTaskManagerAssignMagicLink.py b/src/MCPServer/lib/linkTaskManagerAssignMagicLink.py new file mode 100755 index 0000000000..84a9e142e2 --- /dev/null +++ b/src/MCPServer/lib/linkTaskManagerAssignMagicLink.py @@ -0,0 +1,65 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. 
+ +# @package Archivematica +# @subpackage MCPServer +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ + +import databaseInterface +import threading +import uuid +import sys +import time +#select * from MicroServiceChainChoice JOIN MicroServiceChains on chainAvailable = MicroServiceChains.pk; +#| pk | choiceAvailableAtLink | chainAvailable | pk | startingLink | description + +from linkTaskManager import linkTaskManager +from taskStandard import taskStandard +import jobChain +import databaseInterface +import lxml.etree as etree +import os +import archivematicaMCP +global choicesAvailableForUnits +choicesAvailableForUnits = {} +choicesAvailableForUnitsLock = threading.Lock() + +class linkTaskManagerAssignMagicLink: + def __init__(self, jobChainLink, pk, unit): + self.pk = pk + self.jobChainLink = jobChainLink + self.UUID = uuid.uuid4().__str__() + self.unit = unit + + ###GET THE MAGIC NUMBER FROM THE TASK stuff + link = 0 + sql = """SELECT execute FROM StandardTasksConfigs where pk = """ + pk.__str__() + c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + while row != None: + print row + link = row[0] + row = c.fetchone() + sqlLock.release() + + ###Update the unit + #set the magic number + self.unit.setMagicLink(link, exitStatus="") + self.jobChainLink.linkProcessingComplete(0) diff --git a/src/MCPServer/lib/linkTaskManagerChoice.py b/src/MCPServer/lib/linkTaskManagerChoice.py new file mode 100755 index 0000000000..1c89f91840 --- /dev/null +++ b/src/MCPServer/lib/linkTaskManagerChoice.py @@ -0,0 +1,166 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. + +# @package Archivematica +# @subpackage MCPServer +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ + +import databaseInterface +import datetime +import threading +import uuid +import sys +import time +#select * from MicroServiceChainChoice JOIN MicroServiceChains on chainAvailable = MicroServiceChains.pk; +#| pk | choiceAvailableAtLink | chainAvailable | pk | startingLink | description + +from linkTaskManager import linkTaskManager +from taskStandard import taskStandard +from executeOrRunSubProcess import executeOrRun +import jobChain +import databaseInterface +import lxml.etree as etree +import os +import archivematicaMCP +global choicesAvailableForUnits +choicesAvailableForUnits = {} +choicesAvailableForUnitsLock = threading.Lock() +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +from archivematicaFunctions import unicodeToStr + +waitingOnTimer="waitingOnTimer" + +class linkTaskManagerChoice: + def __init__(self, jobChainLink, pk, unit): + self.choices = [] + self.pk = pk + self.jobChainLink = jobChainLink + self.UUID = uuid.uuid4().__str__() + self.unit = unit + self.delayTimerLock = threading.Lock() + self.delayTimer = None + sql = """SELECT chainAvailable, description FROM MicroServiceChainChoice JOIN MicroServiceChains on chainAvailable = MicroServiceChains.pk WHERE choiceAvailableAtLink = %s ORDER BY MicroServiceChainChoice.pk;""" % (jobChainLink.pk.__str__()) + c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + while row != None: + print row + chainAvailable = row[0] + description = row[1] + 
self.choices.append((chainAvailable, description,)) + row = c.fetchone() + sqlLock.release() + + preConfiguredChain = self.checkForPreconfiguredXML() + if preConfiguredChain != None: + time.sleep(archivematicaMCP.config.getint('MCPServer', "waitOnAutoApprove")) + self.jobChainLink.setExitMessage("Completed successfully") + jobChain.jobChain(self.unit, preConfiguredChain) + + else: + choicesAvailableForUnitsLock.acquire() + if self.delayTimer == None: + self.jobChainLink.setExitMessage('Awaiting decision') + choicesAvailableForUnits[self.jobChainLink.UUID] = self + choicesAvailableForUnitsLock.release() + + def checkForPreconfiguredXML(self): + ret = None + xmlFilePath = os.path.join( \ + self.unit.currentPath.replace("%sharedPath%", archivematicaMCP.config.get('MCPServer', "sharedDirectory"), 1), \ + archivematicaMCP.config.get('MCPServer', "processingXMLFile") \ + ) + xmlFilePath = unicodeToStr(xmlFilePath) + if os.path.isfile(xmlFilePath): + # For a list of items with pks: + # SELECT TasksConfigs.description, choiceAvailableAtLink, ' ' AS 'SPACE', MicroServiceChains.description, chainAvailable FROM MicroServiceChainChoice Join MicroServiceChains on MicroServiceChainChoice.chainAvailable = MicroServiceChains.pk Join MicroServiceChainLinks on MicroServiceChainLinks.pk = MicroServiceChainChoice.choiceAvailableAtLink Join TasksConfigs on TasksConfigs.pk = MicroServiceChainLinks.currentTask ORDER BY choiceAvailableAtLink desc; + try: + command = "sudo chmod 774 \"" + xmlFilePath + "\"" + if isinstance(command, unicode): + command = command.encode("utf-8") + exitCode, stdOut, stdError = executeOrRun("command", command, "", printing=False) + tree = etree.parse(xmlFilePath) + root = tree.getroot() + for preconfiguredChoice in root.find("preconfiguredChoices"): + #if int(preconfiguredChoice.find("appliesTo").text) == self.jobChainLink.pk: + if preconfiguredChoice.find("appliesTo").text == self.jobChainLink.description: + desiredChoice = 
preconfiguredChoice.find("goToChain").text + sql = """SELECT MicroServiceChains.pk FROM MicroServiceChainChoice Join MicroServiceChains on MicroServiceChainChoice.chainAvailable = MicroServiceChains.pk WHERE MicroServiceChains.description = '%s' AND MicroServiceChainChoice.choiceAvailableAtLink = %s;""" % (desiredChoice, self.jobChainLink.pk.__str__()) + c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + while row != None: + ret = row[0] + row = c.fetchone() + sqlLock.release() + try: + #<delay unitAtime="yes">30</delay> + delayXML = preconfiguredChoice.find("delay") + unitAtimeXML = delayXML.get("unitCtime") + if unitAtimeXML != None and unitAtimeXML.lower() != "no": + delaySeconds=int(delayXML.text) + unitTime = os.path.getmtime(self.unit.currentPath.replace("%sharedPath%", \ + archivematicaMCP.config.get('MCPServer', "sharedDirectory"), 1)) + nowTime=time.time() + timeDifference = nowTime - unitTime + timeToGo = delaySeconds - timeDifference + print "time to go:", timeToGo + #print "that will be: ", (nowTime + timeToGo) + self.jobChainLink.setExitMessage("Waiting till: " + datetime.datetime.fromtimestamp((nowTime + timeToGo)).ctime()) + + t = threading.Timer(timeToGo, self.proceedWithChoice, args=[ret], kwargs={"delayTimerStart":True}) + t.daemon = True + self.delayTimer = t + t.start() + return None + + except Exception as inst: + print >>sys.stderr, "Error parsing xml:" + print >>sys.stderr, type(inst) + print >>sys.stderr, inst.args + + except Exception as inst: + print >>sys.stderr, "Error parsing xml:" + print >>sys.stderr, type(inst) + print >>sys.stderr, inst.args + return ret + + def xmlify(self): + ret = etree.Element("choicesAvailableForUnit") + etree.SubElement(ret, "UUID").text = self.jobChainLink.UUID + ret.append(self.unit.xmlify()) + choices = etree.SubElement(ret, "choices") + for chainAvailable, description in self.choices: + choice = etree.SubElement(choices, "choice") + etree.SubElement(choice, "chainAvailable").text = 
chainAvailable.__str__() + etree.SubElement(choice, "description").text = description + return ret + + + + def proceedWithChoice(self, chain, delayTimerStart=False): + choicesAvailableForUnitsLock.acquire() + del choicesAvailableForUnits[self.jobChainLink.UUID] + self.delayTimerLock.acquire() + if self.delayTimer != None and not delayTimerStart: + self.delayTimer.cancel() + self.delayTimer = None + self.delayTimerLock.release() + choicesAvailableForUnitsLock.release() + self.jobChainLink.setExitMessage("Completed successfully") + jobChain.jobChain(self.unit, chain) diff --git a/src/MCPServer/lib/linkTaskManagerDirectories.py b/src/MCPServer/lib/linkTaskManagerDirectories.py new file mode 100755 index 0000000000..9b9d7ed8c2 --- /dev/null +++ b/src/MCPServer/lib/linkTaskManagerDirectories.py @@ -0,0 +1,106 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. 
+ +# @package Archivematica +# @subpackage MCPServer +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ + +from linkTaskManager import linkTaskManager +from taskStandard import taskStandard +from passClasses import * +import os +import uuid +import sys +import threading +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +import databaseInterface +import databaseFunctions +from databaseFunctions import deUnicode + + +class linkTaskManagerDirectories: + def __init__(self, jobChainLink, pk, unit): + self.tasks = [] + self.pk = pk + self.jobChainLink = jobChainLink + sql = """SELECT * FROM StandardTasksConfigs where pk = """ + pk.__str__() + c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + while row != None: + print row + #pk = row[0] + filterFileEnd = deUnicode(row[1]) + filterFileStart = deUnicode(row[2]) + filterSubDir = deUnicode(row[3]) + self.requiresOutputLock = deUnicode(row[4]) + standardOutputFile = deUnicode(row[5]) + standardErrorFile = deUnicode(row[6]) + execute = deUnicode(row[7]) + self.execute = execute + arguments = deUnicode(row[8]) + row = c.fetchone() + sqlLock.release() + #if reloadFileList: + # unit.reloadFileList() + + # "%taskUUID%": task.UUID.__str__(), \ + + if filterSubDir: + directory = os.path.join(unit.currentPath, filterSubDir) + else: + directory = unit.currentPath + + if self.jobChainLink.passVar != None: + if isinstance(self.jobChainLink.passVar, list): + for passVar in self.jobChainLink.passVar: + if isinstance(passVar, replacementDic): + execute, arguments, standardOutputFile, standardErrorFile = passVar.replace(execute, arguments, standardOutputFile, standardErrorFile) + elif isinstance(self.jobChainLink.passVar, replacementDic): + execute, arguments, standardOutputFile, standardErrorFile = self.jobChainLink.passVar.replace(execute, arguments, standardOutputFile, standardErrorFile) + + commandReplacementDic = unit.getReplacementDic(directory) + #for each key replace all instances 
of the key in the command string + for key in commandReplacementDic.iterkeys(): + value = commandReplacementDic[key].replace("\"", ("\\\"")) + if execute: + execute = execute.replace(key, value) + if arguments: + arguments = arguments.replace(key, value) + if standardOutputFile: + standardOutputFile = standardOutputFile.replace(key, value) + if standardErrorFile: + standardErrorFile = standardErrorFile.replace(key, value) + + UUID = uuid.uuid4().__str__() + self.task = taskStandard(self, execute, arguments, standardOutputFile, standardErrorFile, UUID=UUID) + databaseFunctions.logTaskCreatedSQL(self, commandReplacementDic, UUID, arguments) + t = threading.Thread(target=self.task.performTask) + t.daemon = True + t.start() + + + + + + def taskCompletedCallBackFunction(self, task): + print task + databaseFunctions.logTaskCompletedSQL(task) + if True: + self.jobChainLink.linkProcessingComplete(task.results["exitCode"], self.jobChainLink.passVar) diff --git a/src/MCPServer/lib/linkTaskManagerFiles.py b/src/MCPServer/lib/linkTaskManagerFiles.py new file mode 100755 index 0000000000..8673043b0e --- /dev/null +++ b/src/MCPServer/lib/linkTaskManagerFiles.py @@ -0,0 +1,176 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. 
+ +# @package Archivematica +# @subpackage MCPServer +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ + +from linkTaskManager import linkTaskManager +from taskStandard import taskStandard +from unitFile import unitFile +from passClasses import * +import databaseInterface +import threading +import math +import uuid +import time +import sys +import archivematicaMCP +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +import databaseFunctions +from databaseFunctions import deUnicode + +import os + + +class linkTaskManagerFiles: + def __init__(self, jobChainLink, pk, unit): + self.tasks = {} + self.tasksLock = threading.Lock() + self.pk = pk + self.jobChainLink = jobChainLink + self.exitCode = 0 + self.clearToNextLink = False + sql = """SELECT * FROM StandardTasksConfigs where pk = """ + pk.__str__() + c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + while row != None: + filterFileEnd = deUnicode(row[1]) + filterFileStart = deUnicode(row[2]) + filterSubDir = deUnicode(row[3]) + requiresOutputLock = row[4] + self.standardOutputFile = deUnicode(row[5]) + self.standardErrorFile = deUnicode(row[6]) + self.execute = deUnicode(row[7]) + self.arguments = deUnicode(row[8]) + row = c.fetchone() + sqlLock.release() + if requiresOutputLock: + outputLock = threading.Lock() + else: + outputLock = None + + SIPReplacementDic = unit.getReplacementDic(unit.currentPath) + self.tasksLock.acquire() + for file, fileUnit in unit.fileList.items(): + #print "file:", file, fileUnit + if filterFileEnd: + if not file.endswith(filterFileEnd): + continue + if filterFileStart: + if not os.path.basename(file).startswith(filterFileStart): + continue + if filterSubDir: + #print "file", file, type(file) + #print unit.pathString, type(unit.pathString) + #filterSubDir = filterSubDir.encode('utf-8') + #print filterSubDir, type(filterSubDir) + + if not file.startswith(unit.pathString + filterSubDir): + continue + + standardOutputFile = 
self.standardOutputFile + standardErrorFile = self.standardErrorFile + execute = self.execute + arguments = self.arguments + + if self.jobChainLink.passVar != None: + if isinstance(self.jobChainLink.passVar, list): + for passVar in self.jobChainLink.passVar: + if isinstance(passVar, replacementDic): + execute, arguments, standardOutputFile, standardErrorFile = passVar.replace(execute, arguments, standardOutputFile, standardErrorFile) + elif isinstance(self.jobChainLink.passVar, replacementDic): + execute, arguments, standardOutputFile, standardErrorFile = self.jobChainLink.passVar.replace(execute, arguments, standardOutputFile, standardErrorFile) + + commandReplacementDic = fileUnit.getReplacementDic() + for key in commandReplacementDic.iterkeys(): + value = commandReplacementDic[key].replace("\"", ("\\\"")) + #print "key", type(key), key + #print "value", type(value), value + if isinstance(value, unicode): + value = value.encode("utf-8") + #key = key.encode("utf-8") + #value = value.encode("utf-8") + if execute: + execute = execute.replace(key, value) + if arguments: + arguments = arguments.replace(key, value) + if standardOutputFile: + standardOutputFile = standardOutputFile.replace(key, value) + if standardErrorFile: + standardErrorFile = standardErrorFile.replace(key, value) + + for key in SIPReplacementDic.iterkeys(): + value = SIPReplacementDic[key].replace("\"", ("\\\"")) + #print "key", type(key), key + #print "value", type(value), value + if isinstance(value, unicode): + value = value.encode("utf-8") + #key = key.encode("utf-8") + #value = value.encode("utf-8") + + if execute: + execute = execute.replace(key, value) + if arguments: + arguments = arguments.replace(key, value) + if standardOutputFile: + standardOutputFile = standardOutputFile.replace(key, value) + if standardErrorFile: + standardErrorFile = standardErrorFile.replace(key, value) + + UUID = uuid.uuid4().__str__() + task = taskStandard(self, execute, arguments, standardOutputFile, 
standardErrorFile, outputLock=outputLock, UUID=UUID) + self.tasks[UUID] = task + databaseFunctions.logTaskCreatedSQL(self, commandReplacementDic, UUID, arguments) + t = threading.Thread(target=task.performTask) + t.daemon = True + while(archivematicaMCP.limitTaskThreads <= threading.activeCount()): + #print "Waiting for active threads", threading.activeCount() + self.tasksLock.release() + time.sleep(archivematicaMCP.limitTaskThreadsSleep) + self.tasksLock.acquire() + print "Active threads:", threading.activeCount() + t.start() + + + self.clearToNextLink = True + self.tasksLock.release() + if self.tasks == {} : + self.jobChainLink.linkProcessingComplete(self.exitCode) + + + def taskCompletedCallBackFunction(self, task): + print task + #logTaskCompleted() + self.exitCode += math.fabs(task.results["exitCode"]) + databaseFunctions.logTaskCompletedSQL(task) + + if task.UUID in self.tasks: + del self.tasks[task.UUID] + else: + print >>sys.stderr, "Key Value Error:", task.UUID + print >>sys.stderr, "Key Value Error:", self.tasks + exit(1) + + self.tasksLock.acquire() + if self.clearToNextLink == True and self.tasks == {} : + print "DEBUG proceeding to next link", self.jobChainLink.UUID + self.jobChainLink.linkProcessingComplete(self.exitCode, self.jobChainLink.passVar) + self.tasksLock.release() diff --git a/src/MCPServer/lib/linkTaskManagerGetMicroserviceGeneratedListInStdOut.py b/src/MCPServer/lib/linkTaskManagerGetMicroserviceGeneratedListInStdOut.py new file mode 100755 index 0000000000..0cfa60805f --- /dev/null +++ b/src/MCPServer/lib/linkTaskManagerGetMicroserviceGeneratedListInStdOut.py @@ -0,0 +1,124 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. 
<http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. + +# @package Archivematica +# @subpackage MCPServer +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ + +from linkTaskManager import linkTaskManager +from taskStandard import taskStandard +from passClasses import choicesDic +from passClasses import replacementDic +import os +import uuid +import sys +import threading +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +import databaseInterface +import databaseFunctions + + +class linkTaskManagerGetMicroserviceGeneratedListInStdOut: + def __init__(self, jobChainLink, pk, unit): + self.tasks = [] + self.pk = pk + self.jobChainLink = jobChainLink + sql = """SELECT * FROM StandardTasksConfigs where pk = """ + pk.__str__() + c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + while row != None: + print row + #pk = row[0] + filterFileEnd = row[1] + filterFileStart = row[2] + filterSubDir = row[3] + self.requiresOutputLock = row[4] + standardOutputFile = row[5] + standardErrorFile = row[6] + execute = row[7] + self.execute = execute + arguments = row[8] + row = c.fetchone() + sqlLock.release() + + #if reloadFileList: + # unit.reloadFileList() + + # "%taskUUID%": task.UUID.__str__(), \ + + if filterSubDir: + directory = os.path.join(unit.currentPath, filterSubDir) + else: + directory = unit.currentPath + + 
if self.jobChainLink.passVar != None: + if isinstance(self.jobChainLink.passVar, list): + for passVar in self.jobChainLink.passVar: + if isinstance(passVar, replacementDic): + execute, arguments, standardOutputFile, standardErrorFile = passVar.replace(execute, arguments, standardOutputFile, standardErrorFile) + elif isinstance(self.jobChainLink.passVar, replacementDic): + execute, arguments, standardOutputFile, standardErrorFile = self.jobChainLink.passVar.replace(execute, arguments, standardOutputFile, standardErrorFile) + + commandReplacementDic = unit.getReplacementDic(directory) + #for each key replace all instances of the key in the command string + for key in commandReplacementDic.iterkeys(): + value = commandReplacementDic[key].replace("\"", ("\\\"")) + if execute: + execute = execute.replace(key, value) + if arguments: + arguments = arguments.replace(key, value) + if standardOutputFile: + standardOutputFile = standardOutputFile.replace(key, value) + if standardErrorFile: + standardErrorFile = standardErrorFile.replace(key, value) + + UUID = uuid.uuid4().__str__() + self.task = taskStandard(self, execute, arguments, standardOutputFile, standardErrorFile, UUID=UUID) + databaseFunctions.logTaskCreatedSQL(self, commandReplacementDic, UUID, arguments) + t = threading.Thread(target=self.task.performTask) + t.daemon = True + t.start() + + + + + + def taskCompletedCallBackFunction(self, task): + print task + databaseFunctions.logTaskCompletedSQL(task) + try: + choices = choicesDic(eval(task.results["stdOut"])) + except: + print >>sys.stderr, "Error creating dic from output" + choices = choicesDic({}) + if self.jobChainLink.passVar != None: + if isinstance(self.jobChainLink.passVar, list): + found = False + for passVarIndex in range(len(self.jobChainLink.passVar)): + if isinstance(self.jobChainLink.passVar[passVarIndex], choicesDic): + self.jobChainLink.passVar[passVarIndex] = choices + if not found: + self.jobChainLink.passVar.append(choices) + else: + 
self.jobChainLink.passVar = [choices, self.jobChainLink.passVar] + else: + self.jobChainLink.passVar = [choices] + if True: + self.jobChainLink.linkProcessingComplete(task.results["exitCode"], self.jobChainLink.passVar) diff --git a/src/MCPServer/lib/linkTaskManagerGetUserChoiceFromMicroserviceGeneratedList.py b/src/MCPServer/lib/linkTaskManagerGetUserChoiceFromMicroserviceGeneratedList.py new file mode 100755 index 0000000000..a6be8578d2 --- /dev/null +++ b/src/MCPServer/lib/linkTaskManagerGetUserChoiceFromMicroserviceGeneratedList.py @@ -0,0 +1,226 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. 
+ +# @package Archivematica +# @subpackage MCPServer +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ + +import databaseInterface +import datetime +import threading +import uuid +import sys +import time + +from linkTaskManager import linkTaskManager +from taskStandard import taskStandard +import jobChain +import databaseInterface +import lxml.etree as etree +import os +import archivematicaMCP +from linkTaskManagerChoice import choicesAvailableForUnits +from linkTaskManagerChoice import choicesAvailableForUnitsLock +from linkTaskManagerChoice import waitingOnTimer +from linkTaskManagerGetMicroserviceGeneratedListInStdOut import choicesDic +from passClasses import replacementDic + +class linkTaskManagerGetUserChoiceFromMicroserviceGeneratedList: + def __init__(self, jobChainLink, pk, unit): + self.choices = [] + self.pk = pk + self.jobChainLink = jobChainLink + self.UUID = uuid.uuid4().__str__() + self.unit = unit + sql = """SELECT execute FROM StandardTasksConfigs where pk = """ + pk.__str__() + c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + choiceIndex = 0 + while row != None: + print row + key = row[0] + #self.choices.append((choiceIndex, description_, replacementDic_)) + row = c.fetchone() + #choiceIndex += 1 + sqlLock.release() + if isinstance(self.jobChainLink.passVar, list): + found = False + for item in self.jobChainLink.passVar: + print >>sys.stderr + print >>sys.stderr + print >>sys.stderr + print >>sys.stderr, isinstance(item, choicesDic), item + if isinstance(item, choicesDic): + for description_, value in item.dic.iteritems(): + replacementDic_ = {key: value}.__str__() + self.choices.append((choiceIndex, description_, replacementDic_)) + choiceIndex += 1 + found = True + break + if not found: + print >>sys.stderr, "self.jobChainLink.passVar", self.jobChainLink.passVar + throw(exception2) + else: + throw(exception) + + print "choices", self.choices + + preConfiguredChain = self.checkForPreconfiguredXML() + if 
preConfiguredChain != None: + if preConfiguredChain != waitingOnTimer: + #time.sleep(archivematicaMCP.config.getint('MCPServer', "waitOnAutoApprove")) + #print "checking for xml file for processing rules. TODO" + self.jobChainLink.setExitMessage("Completed successfully") + #jobChain.jobChain(self.unit, preConfiguredChain) + rd = replacementDic(eval(preConfiguredChain)) + if self.jobChainLink.passVar != None: + if isinstance(self.jobChainLink.passVar, list): + found = False + for passVar in self.jobChainLink.passVar: + if isinstance(self.jobChainLink.passVar, replacementDic): + new = {} + new.update(self.jobChainLink.passVar.dic) + new.update(rd.dic) + rd.dic = [new] + found = True + break + if not found: + self.jobChainLink.passVar.append(rd) + rd = self.jobChainLink.passVar + else: + rd = [rd] + self.jobChainLink.linkProcessingComplete(0, rd) + else: + print "waiting on delay to resume processing on unit:", unit + else: + choicesAvailableForUnitsLock.acquire() + self.jobChainLink.setExitMessage('Awaiting decision') + choicesAvailableForUnits[self.jobChainLink.UUID] = self + choicesAvailableForUnitsLock.release() + + def checkForPreconfiguredXML(self): + ret = None + xmlFilePath = os.path.join( \ + self.unit.currentPath.replace("%sharedPath%", archivematicaMCP.config.get('MCPServer', "sharedDirectory"), 1) + "/", \ + archivematicaMCP.config.get('MCPServer', "processingXMLFile") \ + ) + + if os.path.isfile(xmlFilePath): + # For a list of items with pks: + # SELECT TasksConfigs.description, choiceAvailableAtLink, ' ' AS 'SPACE', MicroServiceChains.description, chainAvailable FROM MicroServiceChainChoice Join MicroServiceChains on MicroServiceChainChoice.chainAvailable = MicroServiceChains.pk Join MicroServiceChainLinks on MicroServiceChainLinks.pk = MicroServiceChainChoice.choiceAvailableAtLink Join TasksConfigs on TasksConfigs.pk = MicroServiceChainLinks.currentTask ORDER BY choiceAvailableAtLink desc; + try: + tree = etree.parse(xmlFilePath) + root = tree.getroot() 
+ for preconfiguredChoice in root.find("preconfiguredChoices"): + #if int(preconfiguredChoice.find("appliesTo").text) == self.jobChainLink.pk: + if preconfiguredChoice.find("appliesTo").text == self.jobChainLink.description: + desiredChoice = preconfiguredChoice.find("goToChain").text + sql = """SELECT MicroServiceChoiceReplacementDic.replacementDic FROM MicroServiceChoiceReplacementDic WHERE MicroServiceChoiceReplacementDic.description = '%s' AND MicroServiceChoiceReplacementDic.choiceAvailableAtLink = %s;""" % (desiredChoice, self.jobChainLink.pk.__str__()) + c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + while row != None: + ret = row[0] + row = c.fetchone() + sqlLock.release() + try: + #<delay unitAtime="yes">30</delay> + delayXML = preconfiguredChoice.find("delay") + unitAtimeXML = delayXML.get("unitCtime") + if unitAtimeXML != None and unitAtimeXML.lower() != "no": + delaySeconds=int(delayXML.text) + unitTime = os.path.getmtime(self.unit.currentPath.replace("%sharedPath%", \ + archivematicaMCP.config.get('MCPServer', "sharedDirectory"), 1)) + nowTime=time.time() + timeDifference = nowTime - unitTime + timeToGo = delaySeconds - timeDifference + print "time to go:", timeToGo + #print "that will be: ", (nowTime + timeToGo) + self.jobChainLink.setExitMessage("Waiting till: " + datetime.datetime.fromtimestamp((nowTime + timeToGo)).ctime()) + rd = replacementDic(eval(ret)) + if self.jobChainLink.passVar != None: + if isinstance(self.jobChainLink.passVar, replacementDic): + new = {} + new.update(self.jobChainLink.passVar.dic) + new.update(rd.dic) + rd.dic = new + t = threading.Timer(timeToGo, self.jobChainLink.linkProcessingComplete, args=[0, rd], kwargs={}) + t.daemon = True + t.start() + + t2 = threading.Timer(timeToGo, self.jobChainLink.setExitMessage, args=["Completed successfully"], kwargs={}) + t2.start() + return waitingOnTimer + + except Exception as inst: + print >>sys.stderr, "Error parsing xml:" + print >>sys.stderr, type(inst) + 
print >>sys.stderr, inst.args + + except Exception as inst: + print >>sys.stderr, "Error parsing xml:" + print >>sys.stderr, type(inst) + print >>sys.stderr, inst.args + return ret + + def xmlify(self): + #print "xmlify" + ret = etree.Element("choicesAvailableForUnit") + etree.SubElement(ret, "UUID").text = self.jobChainLink.UUID + ret.append(self.unit.xmlify()) + choices = etree.SubElement(ret, "choices") + for chainAvailable, description, rd in self.choices: + choice = etree.SubElement(choices, "choice") + etree.SubElement(choice, "chainAvailable").text = chainAvailable.__str__() + etree.SubElement(choice, "description").text = description + #print etree.tostring(ret) + return ret + + + + def proceedWithChoice(self, index): + choicesAvailableForUnitsLock.acquire() + del choicesAvailableForUnits[self.jobChainLink.UUID] + choicesAvailableForUnitsLock.release() + #while archivematicaMCP.transferDMovedFromCounter.value != 0: + # print "Waiting for all files to finish updating their location in the database" + # print transferD.movedFrom + # time.sleep(1) + + #get the one at index, and go with it. 
+ choiceIndex, description, replacementDic2 = self.choices[int(index)] + rd = replacementDic(eval(replacementDic2)) + if self.jobChainLink.passVar != None: + if isinstance(self.jobChainLink.passVar, list): + found = False + for passVar in self.jobChainLink.passVar: + if isinstance(self.jobChainLink.passVar, replacementDic): + new = {} + new.update(self.jobChainLink.passVar.dic) + new.update(rd.dic) + rd.dic = [new] + found = True + break + if not found: + self.jobChainLink.passVar.append(rd) + rd = self.jobChainLink.passVar + else: + rd = [rd] + self.jobChainLink.linkProcessingComplete(0, rd) + \ No newline at end of file diff --git a/src/MCPServer/lib/linkTaskManagerLoadMagicLink.py b/src/MCPServer/lib/linkTaskManagerLoadMagicLink.py new file mode 100755 index 0000000000..4b3c06818d --- /dev/null +++ b/src/MCPServer/lib/linkTaskManagerLoadMagicLink.py @@ -0,0 +1,56 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. 
+ +# @package Archivematica +# @subpackage MCPServer +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ + +import databaseInterface +import threading +import uuid +import sys +import time +#select * from MicroServiceChainChoice JOIN MicroServiceChains on chainAvailable = MicroServiceChains.pk; +#| pk | choiceAvailableAtLink | chainAvailable | pk | startingLink | description + +from linkTaskManager import linkTaskManager +from taskStandard import taskStandard +import jobChain +import databaseInterface +import lxml.etree as etree +import os +import archivematicaMCP +global choicesAvailableForUnits +choicesAvailableForUnits = {} +choicesAvailableForUnitsLock = threading.Lock() + +class linkTaskManagerLoadMagicLink: + def __init__(self, jobChainLink, pk, unit): + self.pk = pk + self.jobChainLink = jobChainLink + self.UUID = uuid.uuid4().__str__() + self.unit = unit + + ###Update the unit + magicLink = self.unit.getMagicLink() + if magicLink != None: + link, exitStatus = magicLink + self.jobChainLink.setExitMessage("Completed successfully") + self.jobChainLink.jobChain.nextChainLink(link) diff --git a/src/MCPServer/lib/linkTaskManagerReplacementDicFromChoice.py b/src/MCPServer/lib/linkTaskManagerReplacementDicFromChoice.py new file mode 100755 index 0000000000..e596dfb62a --- /dev/null +++ b/src/MCPServer/lib/linkTaskManagerReplacementDicFromChoice.py @@ -0,0 +1,208 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. + +# @package Archivematica +# @subpackage MCPServer +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ + +import databaseInterface +import datetime +import threading +import uuid +import sys +import time +#select * from MicroServiceChainChoice JOIN MicroServiceChains on chainAvailable = MicroServiceChains.pk; +#| pk | choiceAvailableAtLink | chainAvailable | pk | startingLink | description + +from linkTaskManager import linkTaskManager +from taskStandard import taskStandard +import jobChain +import databaseInterface +import lxml.etree as etree +import os +import archivematicaMCP +from linkTaskManagerChoice import choicesAvailableForUnits +from linkTaskManagerChoice import choicesAvailableForUnitsLock +from linkTaskManagerChoice import waitingOnTimer +from passClasses import * + +class linkTaskManagerReplacementDicFromChoice: + def __init__(self, jobChainLink, pk, unit): + self.choices = [] + self.pk = pk + self.jobChainLink = jobChainLink + self.UUID = uuid.uuid4().__str__() + self.unit = unit + sql = """SELECT replacementDic, description FROM MicroServiceChoiceReplacementDic WHERE choiceAvailableAtLink = """ + jobChainLink.pk.__str__() + c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + choiceIndex = 0 + while row != None: + print row + replacementDic_ = row[0] + description_ = row[1] + self.choices.append((choiceIndex, description_, replacementDic_)) + row = c.fetchone() + choiceIndex += 1 + sqlLock.release() + #print "choices", self.choices + + preConfiguredChain = self.checkForPreconfiguredXML() + if preConfiguredChain != None: + if 
preConfiguredChain != waitingOnTimer: + #time.sleep(archivematicaMCP.config.getint('MCPServer', "waitOnAutoApprove")) + #print "checking for xml file for processing rules. TODO" + self.jobChainLink.setExitMessage("Completed successfully") + #jobChain.jobChain(self.unit, preConfiguredChain) + rd = replacementDic(eval(preConfiguredChain)) + if self.jobChainLink.passVar != None: + if isinstance(self.jobChainLink.passVar, list): + found = False + for passVar in self.jobChainLink.passVar: + if isinstance(self.jobChainLink.passVar, replacementDic): + new = {} + new.update(self.jobChainLink.passVar.dic) + new.update(rd.dic) + rd.dic = [new] + found = True + break + if not found: + self.jobChainLink.passVar.append(rd) + rd = self.jobChainLink.passVar + else: + rd = [rd] + self.jobChainLink.linkProcessingComplete(0, rd) + else: + print "waiting on delay to resume processing on unit:", unit + else: + choicesAvailableForUnitsLock.acquire() + self.jobChainLink.setExitMessage('Awaiting decision') + choicesAvailableForUnits[self.jobChainLink.UUID] = self + choicesAvailableForUnitsLock.release() + + def checkForPreconfiguredXML(self): + ret = None + xmlFilePath = os.path.join( \ + self.unit.currentPath.replace("%sharedPath%", archivematicaMCP.config.get('MCPServer', "sharedDirectory"), 1) + "/", \ + archivematicaMCP.config.get('MCPServer', "processingXMLFile") \ + ) + + if os.path.isfile(xmlFilePath): + # For a list of items with pks: + # SELECT TasksConfigs.description, choiceAvailableAtLink, ' ' AS 'SPACE', MicroServiceChains.description, chainAvailable FROM MicroServiceChainChoice Join MicroServiceChains on MicroServiceChainChoice.chainAvailable = MicroServiceChains.pk Join MicroServiceChainLinks on MicroServiceChainLinks.pk = MicroServiceChainChoice.choiceAvailableAtLink Join TasksConfigs on TasksConfigs.pk = MicroServiceChainLinks.currentTask ORDER BY choiceAvailableAtLink desc; + try: + tree = etree.parse(xmlFilePath) + root = tree.getroot() + for preconfiguredChoice in 
root.find("preconfiguredChoices"): + #if int(preconfiguredChoice.find("appliesTo").text) == self.jobChainLink.pk: + if preconfiguredChoice.find("appliesTo").text == self.jobChainLink.description: + desiredChoice = preconfiguredChoice.find("goToChain").text + sql = """SELECT MicroServiceChoiceReplacementDic.replacementDic FROM MicroServiceChoiceReplacementDic WHERE MicroServiceChoiceReplacementDic.description = '%s' AND MicroServiceChoiceReplacementDic.choiceAvailableAtLink = %s;""" % (desiredChoice, self.jobChainLink.pk.__str__()) + c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + while row != None: + ret = row[0] + row = c.fetchone() + sqlLock.release() + try: + #<delay unitAtime="yes">30</delay> + delayXML = preconfiguredChoice.find("delay") + unitAtimeXML = delayXML.get("unitCtime") + if unitAtimeXML != None and unitAtimeXML.lower() != "no": + delaySeconds=int(delayXML.text) + unitTime = os.path.getmtime(self.unit.currentPath.replace("%sharedPath%", \ + archivematicaMCP.config.get('MCPServer', "sharedDirectory"), 1)) + nowTime=time.time() + timeDifference = nowTime - unitTime + timeToGo = delaySeconds - timeDifference + print "time to go:", timeToGo + #print "that will be: ", (nowTime + timeToGo) + self.jobChainLink.setExitMessage("Waiting till: " + datetime.datetime.fromtimestamp((nowTime + timeToGo)).ctime()) + rd = replacementDic(eval(ret)) + if self.jobChainLink.passVar != None: + if isinstance(self.jobChainLink.passVar, replacementDic): + new = {} + new.update(self.jobChainLink.passVar.dic) + new.update(rd.dic) + rd.dic = new + t = threading.Timer(timeToGo, self.jobChainLink.linkProcessingComplete, args=[0, rd], kwargs={}) + t.daemon = True + t.start() + + t2 = threading.Timer(timeToGo, self.jobChainLink.setExitMessage, args=["Completed successfully"], kwargs={}) + t2.start() + return waitingOnTimer + + except Exception as inst: + print >>sys.stderr, "Error parsing xml:" + print >>sys.stderr, type(inst) + print >>sys.stderr, inst.args + + 
except Exception as inst: + print >>sys.stderr, "Error parsing xml:" + print >>sys.stderr, type(inst) + print >>sys.stderr, inst.args + return ret + + def xmlify(self): + print "xmlify" + ret = etree.Element("choicesAvailableForUnit") + etree.SubElement(ret, "UUID").text = self.jobChainLink.UUID + ret.append(self.unit.xmlify()) + choices = etree.SubElement(ret, "choices") + for chainAvailable, description, rd in self.choices: + choice = etree.SubElement(choices, "choice") + etree.SubElement(choice, "chainAvailable").text = chainAvailable.__str__() + etree.SubElement(choice, "description").text = description + print etree.tostring(ret) + return ret + + + + def proceedWithChoice(self, index): + choicesAvailableForUnitsLock.acquire() + del choicesAvailableForUnits[self.jobChainLink.UUID] + choicesAvailableForUnitsLock.release() + #while archivematicaMCP.transferDMovedFromCounter.value != 0: + # print "Waiting for all files to finish updating their location in the database" + # print transferD.movedFrom + # time.sleep(1) + + #get the one at index, and go with it. + choiceIndex, description, replacementDic2 = self.choices[int(index)] + rd = replacementDic(eval(replacementDic2)) + if self.jobChainLink.passVar != None: + if isinstance(self.jobChainLink.passVar, list): + found = False + for passVar in self.jobChainLink.passVar: + if isinstance(self.jobChainLink.passVar, replacementDic): + new = {} + new.update(self.jobChainLink.passVar.dic) + new.update(rd.dic) + rd.dic = [new] + found = True + break + if not found: + self.jobChainLink.passVar.append(rd) + rd = self.jobChainLink.passVar + else: + rd = [rd] + self.jobChainLink.linkProcessingComplete(0, rd) + \ No newline at end of file diff --git a/src/MCPServer/lib/linkTaskManagerSplit.py b/src/MCPServer/lib/linkTaskManagerSplit.py new file mode 100755 index 0000000000..f39ece35ac --- /dev/null +++ b/src/MCPServer/lib/linkTaskManagerSplit.py @@ -0,0 +1,166 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. 
+# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. + +# @package Archivematica +# @subpackage MCPServer +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ + +from linkTaskManager import linkTaskManager +from taskStandard import taskStandard +from unitFile import unitFile +from passClasses import * +import jobChain +import databaseInterface +import threading +import math +import uuid +import time +import sys +import archivematicaMCP +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +import databaseFunctions +from databaseFunctions import deUnicode + +import os + + +class linkTaskManagerSplit: + def __init__(self, jobChainLink, pk, unit): + self.tasks = {} + self.tasksLock = threading.Lock() + self.pk = pk + self.jobChainLink = jobChainLink + self.exitCode = 0 + self.clearToNextLink = False + sql = """SELECT * FROM StandardTasksConfigs where pk = """ + pk.__str__() + c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + while row != None: + filterFileEnd = deUnicode(row[1]) + filterFileStart = deUnicode(row[2]) + filterSubDir = deUnicode(row[3]) + requiresOutputLock = row[4] + self.standardOutputFile = deUnicode(row[5]) + self.standardErrorFile = deUnicode(row[6]) + self.execute = deUnicode(row[7]) + self.arguments = deUnicode(row[8]) + row = 
c.fetchone() + sqlLock.release() + if requiresOutputLock: + outputLock = threading.Lock() + else: + outputLock = None + + SIPReplacementDic = unit.getReplacementDic(unit.currentPath) + + self.tasksLock.acquire() + print "Debug - ", unit.fileList.items() + for file, fileUnit in unit.fileList.items(): + #print "file:", file, fileUnit + if filterFileEnd: + if not file.endswith(filterFileEnd): + continue + if filterFileStart: + if not os.path.basename(file).startswith(filterFileStart): + continue + if filterSubDir: + #print "file", file, type(file) + #print unit.pathString, type(unit.pathString) + #filterSubDir = filterSubDir.encode('utf-8') + #print filterSubDir, type(filterSubDir) + + if not file.startswith(unit.pathString + filterSubDir): + print "skipping file", file, filterSubDir + continue + + standardOutputFile = self.standardOutputFile + standardErrorFile = self.standardErrorFile + execute = self.execute + arguments = self.arguments + + if self.jobChainLink.passVar != None: + if isinstance(self.jobChainLink.passVar, replacementDic): + execute, arguments, standardOutputFile, standardErrorFile = self.jobChainLink.passVar.replace(execute, arguments, standardOutputFile, standardErrorFile) + + commandReplacementDic = fileUnit.getReplacementDic() + for key in commandReplacementDic.iterkeys(): + value = commandReplacementDic[key].replace("\"", ("\\\"")) + #print "key", type(key), key + #print "value", type(value), value + if isinstance(value, unicode): + value = value.encode("utf-8") + #key = key.encode("utf-8") + #value = value.encode("utf-8") + if execute: + execute = execute.replace(key, value) + if arguments: + arguments = arguments.replace(key, value) + if standardOutputFile: + standardOutputFile = standardOutputFile.replace(key, value) + if standardErrorFile: + standardErrorFile = standardErrorFile.replace(key, value) + for key in SIPReplacementDic.iterkeys(): + value = SIPReplacementDic[key].replace("\"", ("\\\"")) + #print "key", type(key), key + #print 
"value", type(value), value + if isinstance(value, unicode): + value = value.encode("utf-8") + #key = key.encode("utf-8") + #value = value.encode("utf-8") + + if execute: + execute = execute.replace(key, value) + if arguments: + arguments = arguments.replace(key, value) + if standardOutputFile: + standardOutputFile = standardOutputFile.replace(key, value) + if standardErrorFile: + standardErrorFile = standardErrorFile.replace(key, value) + UUID = uuid.uuid4().__str__() + self.tasks[UUID] = None + ## passVar = [{preservationJobUUID, accessJobUUID, thumbnailsJobUUID}] #an idea not in use + t = threading.Thread(target=jobChain.jobChain, args=(fileUnit, execute, self.taskCompletedCallBackFunction,), kwargs={"passVar":self.jobChainLink.passVar, "UUID":UUID, "subJobOf":self.jobChainLink.UUID.__str__()} ) + t.daemon = True + while(archivematicaMCP.limitTaskThreads/2 <= threading.activeCount()): + #print "Waiting for active threads", threading.activeCount() + self.tasksLock.release() + time.sleep(archivematicaMCP.limitTaskThreadsSleep) + self.tasksLock.acquire() + print "Active threads:", threading.activeCount() + t.start() + self.clearToNextLink = True + self.tasksLock.release() + if self.tasks == {} : + self.jobChainLink.linkProcessingComplete(self.exitCode) + + + def taskCompletedCallBackFunction(self, jobChain): + if jobChain.UUID in self.tasks: + del self.tasks[jobChain.UUID] + else: + print >>sys.stderr, "Key Value Error:", jobChain.UUID + print >>sys.stderr, "Key Value Error:", self.tasks + exit(1) + + self.tasksLock.acquire() + if self.clearToNextLink == True and self.tasks == {} : + print "DEBUG proceeding to next link", self.jobChainLink.UUID + self.jobChainLink.linkProcessingComplete(self.exitCode, self.jobChainLink.passVar) + self.tasksLock.release() diff --git a/src/MCPServer/lib/linkTaskManagerSplitOnFileIdAndruleset.py b/src/MCPServer/lib/linkTaskManagerSplitOnFileIdAndruleset.py new file mode 100755 index 0000000000..14c0506852 --- /dev/null +++ 
b/src/MCPServer/lib/linkTaskManagerSplitOnFileIdAndruleset.py @@ -0,0 +1,121 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. + +# @package Archivematica +# @subpackage MCPServer +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ + +from linkTaskManager import linkTaskManager +from taskStandard import taskStandard +from unitFile import unitFile +from passClasses import * +import jobChain +import databaseInterface +import threading +import math +import uuid +import time +import sys +import archivematicaMCP +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +import databaseFunctions +from databaseFunctions import deUnicode + +import os + + +class linkTaskManagerSplitOnFileIdAndruleset: + def __init__(self, jobChainLink, pk, unit): + self.tasks = {} + self.tasksLock = threading.Lock() + self.pk = pk + self.jobChainLink = jobChainLink + self.exitCode = 0 + self.clearToNextLink = False + sql = """SELECT * FROM StandardTasksConfigs where pk = """ + pk.__str__() + c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + while row != None: + filterFileEnd = deUnicode(row[1]) + filterFileStart = deUnicode(row[2]) + filterSubDir = deUnicode(row[3]) + requiresOutputLock = row[4] + 
self.standardOutputFile = deUnicode(row[5]) + self.standardErrorFile = deUnicode(row[6]) + self.execute = deUnicode(row[7]) + self.arguments = deUnicode(row[8]) + row = c.fetchone() + sqlLock.release() + if requiresOutputLock: + outputLock = threading.Lock() + else: + outputLock = None + + SIPReplacementDic = unit.getReplacementDic(unit.currentPath) + + self.tasksLock.acquire() + for file, fileUnit in unit.fileList.items(): + #print "file:", file, fileUnit + if filterFileEnd: + if not file.endswith(filterFileEnd): + continue + if filterFileStart: + if not os.path.basename(file).startswith(filterFileStart): + continue + if filterSubDir: + #print "file", file, type(file) + #print unit.pathString, type(unit.pathString) + #filterSubDir = filterSubDir.encode('utf-8') + #print filterSubDir, type(filterSubDir) + + if not file.startswith(unit.pathString + filterSubDir): + print "skipping file", file, filterSubDir + continue + + standardOutputFile = self.standardOutputFile + standardErrorFile = self.standardErrorFile + execute = self.execute + arguments = self.arguments + + if self.jobChainLink.passVar != None: + if isinstance(self.jobChainLink.passVar, replacementDic): + execute, arguments, standardOutputFile, standardErrorFile = self.jobChainLink.passVar.replace(execute, arguments, standardOutputFile, standardErrorFile) + + fileUUID = unit.UUID + ComandClassification = self.execute + #passVar=self.jobChainLink.passVar + toPassVar = eval(arguments) + toPassVar.update({"%standardErrorFile%":standardErrorFile, "%standardOutputFile%":standardOutputFile, '%commandClassifications%':ComandClassification}) + print "debug", toPassVar + passVar=replacementDic(toPassVar) + sql = """SELECT MicroServiceChainLinks.pk FROM FilesIdentifiedIDs JOIN CommandRelationships ON FilesIdentifiedIDs.fileID = CommandRelationships.fileID JOIN CommandClassifications ON CommandClassifications.pk = CommandRelationships.commandClassification JOIN TasksConfigs ON TasksConfigs.taskTypePKReference = 
CommandRelationships.pk JOIN MicroServiceChainLinks ON MicroServiceChainLinks.currentTask = TasksConfigs.pk WHERE TasksConfigs.taskType = 8 AND FilesIdentifiedIDs.fileUUID = '%s' AND CommandClassifications.classification = '%s';""" % (fileUUID, ComandClassification) + rows = databaseInterface.queryAllSQL(sql) + if rows and len(rows): + print "DEBUGGING 6772: ", fileUUID, ComandClassification, rows + for row in rows: + jobChainLink.jobChain.nextChainLink(row[0], passVar=passVar, incrementLinkSplit=True, subJobOf=self.jobChainLink.UUID) + else: + sql = """SELECT MicroserviceChainLink FROM DefaultCommandsForClassifications JOIN CommandClassifications ON CommandClassifications.pk = DefaultCommandsForClassifications.forClassification WHERE CommandClassifications.classification = '%s'""" % (ComandClassification) + rows = databaseInterface.queryAllSQL(sql) + print "DEBUGGING2 6772: ", fileUUID, ComandClassification, rows + for row in rows: + jobChainLink.jobChain.nextChainLink(row[0], passVar=passVar, incrementLinkSplit=True, subJobOf=self.jobChainLink.UUID) + + self.jobChainLink.linkProcessingComplete(self.exitCode, passVar=self.jobChainLink.passVar) \ No newline at end of file diff --git a/src/MCPServer/lib/linkTaskManagerTranscoderCommand.py b/src/MCPServer/lib/linkTaskManagerTranscoderCommand.py new file mode 100755 index 0000000000..6aab14941b --- /dev/null +++ b/src/MCPServer/lib/linkTaskManagerTranscoderCommand.py @@ -0,0 +1,128 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. + +# @package Archivematica +# @subpackage MCPServer +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ + +from linkTaskManager import linkTaskManager +from taskStandard import taskStandard +from unitFile import unitFile +from passClasses import * +import databaseInterface +import threading +import math +import uuid +import time +import sys +import archivematicaMCP +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +import databaseFunctions +from databaseFunctions import deUnicode + +import os + +global outputLock +outputLock = threading.Lock() + + +class linkTaskManagerTranscoderCommand: + def __init__(self, jobChainLink, pk, unit): + global outputLock + self.tasks = {} + self.tasksLock = threading.Lock() + self.pk = pk + self.jobChainLink = jobChainLink + self.exitCode = 0 + self.clearToNextLink = False + + opts = {"inputFile":"%relativeLocation%", "fileUUID":"%fileUUID%", 'commandClassifications':'%commandClassifications%', "taskUUID":"%taskUUID%", "objectsDirectory":"%SIPObjectsDirectory%", "logsDirectory":"%SIPLogsDirectory%", "sipUUID":"%SIPUUID%", "sipPath":"%SIPDirectory%", "fileGrpUse":"%fileGrpUse%", "normalizeFileGrpUse":"%normalizeFileGrpUse%", "excludeDirectory":"%excludeDirectory%", "standardErrorFile":"%standardErrorFile%", "standardOutputFile":"%standardOutputFile%"} + + SIPReplacementDic = unit.getReplacementDic(unit.currentPath) + for optsKey, optsValue in opts.iteritems(): + if self.jobChainLink.passVar != None: + if isinstance(self.jobChainLink.passVar, replacementDic): + opts[optsKey] = 
self.jobChainLink.passVar.replace(opts[optsKey])[0] + + commandReplacementDic = unit.getReplacementDic() + for key, value in commandReplacementDic.iteritems(): + opts[optsKey] = opts[optsKey].replace(key, value) + + for key, value in SIPReplacementDic.iteritems(): + opts[optsKey] = opts[optsKey].replace(key, value) + + self.tasksLock.acquire() + commandReplacementDic = unit.getReplacementDic() + sql = """SELECT CommandRelationships.pk FROM CommandRelationships JOIN Commands ON CommandRelationships.command = Commands.pk WHERE CommandRelationships.pk = %s;""" % (pk.__str__()) + rows = databaseInterface.queryAllSQL(sql) + taskCount = 0 + if rows: + for row in rows: + UUID = uuid.uuid4().__str__() + opts["taskUUID"] = UUID + opts["CommandRelationship"] = pk.__str__() + execute = "transcoder_cr%d" % (pk) + deUnicode(execute) + arguments = row.__str__() + standardOutputFile = opts["standardOutputFile"] + standardErrorFile = opts["standardErrorFile"] + self.standardOutputFile = standardOutputFile + self.standardErrorFile = standardErrorFile + self.execute = execute + self.arguments = arguments + task = taskStandard(self, execute, opts, standardOutputFile, standardErrorFile, outputLock=outputLock, UUID=UUID) + self.tasks[UUID] = task + databaseFunctions.logTaskCreatedSQL(self, commandReplacementDic, UUID, arguments) + t = threading.Thread(target=task.performTask) + t.daemon = True + while(archivematicaMCP.limitTaskThreads <= threading.activeCount()): + #print "Waiting for active threads", threading.activeCount() + self.tasksLock.release() + time.sleep(archivematicaMCP.limitTaskThreadsSleep) + self.tasksLock.acquire() + print "Active threads:", threading.activeCount() + taskCount += 1 + t.start() + + + self.clearToNextLink = True + self.tasksLock.release() + if taskCount == 0: + self.jobChainLink.linkProcessingComplete(self.exitCode) + + + def taskCompletedCallBackFunction(self, task): + print task + #logTaskCompleted() + self.exitCode += math.fabs(task.results["exitCode"]) 
+ databaseFunctions.logTaskCompletedSQL(task) + + if task.UUID in self.tasks: + del self.tasks[task.UUID] + else: + print >>sys.stderr, "Key Value Error:", task.UUID + print >>sys.stderr, "Key Value Error:", self.tasks + exit(1) + + self.tasksLock.acquire() + if self.clearToNextLink == True and self.tasks == {} : + print "DEBUG proceeding to next link", self.jobChainLink.UUID + self.jobChainLink.linkProcessingComplete(self.exitCode, self.jobChainLink.passVar) + self.tasksLock.release() diff --git a/src/MCPServer/lib/passClasses.py b/src/MCPServer/lib/passClasses.py new file mode 100755 index 0000000000..01f2f4ddeb --- /dev/null +++ b/src/MCPServer/lib/passClasses.py @@ -0,0 +1,42 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. 
class replacementDic:
    """A mapping of %placeholder% keys to their replacement values, applied
    to command templates as they travel down a job chain."""

    def __init__(self, dictionary):
        # dictionary: {placeholder string: replacement string}
        self.dic = dictionary

    def replace(self, *a):
        """Apply every key->value substitution to each positional argument.

        None arguments pass through unchanged. Returns a list with one entry
        per argument, in the same order.
        """
        ret = []
        for orig in a:
            # FIX: the original assigned `new = orig` and never used it; the
            # dead local is removed. Behavior is unchanged.
            if orig is not None:
                for key, value in self.dic.items():
                    orig = orig.replace(key, value)
            ret.append(orig)
        return ret


class choicesDic:
    """Wrapper marking a dictionary of user-selectable choices carried as a
    passVar (distinguished from replacementDic by type checks elsewhere)."""

    def __init__(self, dictionary):
        self.dic = dictionary
If not, see <http://www.gnu.org/licenses/>. + +# @package Archivematica +# @subpackage MCPServer-plantUMLTextGenerator +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ + +#sudo apt-get install graphviz + +import sys +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +import databaseInterface + +f = open('plantUML.txt', 'w') +global processedJobChainLinks +processedJobChainLinks = [] +subChains = {} +def writePlant(*items): + p = "" + for str in items: + p = "%s%s" % (p, str.__str__()) + print p + f.write(p) + f.write("\n") + +def jobChainLinkExitCodesTextGet(indent, exitCode, nextMicroServiceChainLink, exitMessage, set): + leadIn = "" + if set: + leadIn = "->[false]" + writePlant( indent, leadIn, """if "exitCodeIs %s" then""" % (exitCode.__str__())) + if nextMicroServiceChainLink: + jobChainLinkTextGet(indent + " ", "-->[true]", nextMicroServiceChainLink, label = "") + else: + writePlant(indent + " ", """-->[true] "End Of Chain" """) + + +def jobChainLinkTextGet(indent, leadIn, pk, label = ""): + global subChains + global processedJobChainLinks + sql = """SELECT MicroServiceChainLinks.currentTask, MicroServiceChainLinks.defaultNextChainLink, TasksConfigs.taskType, TasksConfigs.taskTypePKReference, TasksConfigs.description, MicroServiceChainLinks.reloadFileList, Sounds.fileLocation, MicroServiceChainLinks.defaultExitMessage, MicroServiceChainLinks.microserviceGroup, StandardTasksConfigs.execute FROM MicroServiceChainLinks LEFT OUTER JOIN Sounds ON MicroServiceChainLinks.defaultPlaySound = Sounds.pk JOIN TasksConfigs on MicroServiceChainLinks.currentTask = TasksConfigs.pk LEFT OUTER JOIN StandardTasksConfigs ON TasksConfigs.taskTypePKReference = StandardTasksConfigs.pk WHERE MicroServiceChainLinks.pk = '%s';""" % (pk.__str__()) + print sql + rows = databaseInterface.queryAllSQL(sql) + for row in rows: + currentTask = row[0] + defaultNextChainLink = row[1] + taskType = row[2] + taskTypePKReference = row[3] + description = row[4] + 
reloadFileList = row[5] + defaultSoundFile = row[6] + defaultExitMessage = row[7] + microserviceGroup = row[8] + execute = row[9] + + if taskType == 3: + sql = """SELECT execute FROM StandardTasksConfigs WHERE pk = %d; """ % (taskTypePKReference) + rows = databaseInterface.queryAllSQL(sql) + leadOut = "%d. %s" % (pk, description) + if label != "": + writePlant( ("%s%s \"%s %s - Assign Magic Link %s\"") % (indent, leadIn , label, leadOut, rows[0][0].__str__()) ) + else: + writePlant( ("%s%s \"%s - Assign Magic Link %s\"") % (indent, leadIn, leadOut, rows[0][0].__str__()) ) + + if pk in processedJobChainLinks: + return + else: + processedJobChainLinks.append(pk) + else: + leadOut = "%d. %s" % (pk, description) + if label != "": + writePlant( ("%s%s \"%s %s\"") % (indent, leadIn , label, leadOut) ) + else: + writePlant( ("%s%s \"%s\"") % (indent, leadIn, leadOut) ) + + if pk in processedJobChainLinks: + return + else: + processedJobChainLinks.append(pk) + + if taskType == 0 or taskType == 1 or taskType == 3 or taskType == 5 or taskType == 6 or taskType == 7: #| 0 | one instance | 1 | for each file | + sql = """SELECT exitCode, nextMicroServiceChainLink, exitMessage FROM MicroServiceChainLinksExitCodes WHERE microServiceChainLink = '%s';""" % (pk.__str__()) + rows2 = databaseInterface.queryAllSQL(sql) + set = False + ifindent = indent + " " + for row2 in rows2: + if set: + #writePlant( indent + " ", """endif""") + writePlant( ifindent[:-1], """else""") + exitCode = row2[0] + nextMicroServiceChainLink = row2[1] + exitMessage = row2[2] + jobChainLinkExitCodesTextGet(ifindent, exitCode, nextMicroServiceChainLink, exitMessage, set) + set = True + ifindent = ifindent + " " + + if set: + writePlant( ifindent, """else""") + writePlant( ifindent, """->[false] if "%d. default" then """ % (pk) ) + else: + writePlant( ifindent, """ if "%d. 
default" """ % (pk) ) + + if defaultNextChainLink: + jobChainLinkTextGet(ifindent + " ", "-->[true]", defaultNextChainLink, label="") + else: + writePlant( ifindent, """-->[true] "End Of Chain" """ ) + while ifindent != indent + " ": + writePlant( ifindent + " ", """endif""") + ifindent = ifindent[:-1] + writePlant( ifindent, """endif""" ) + + if taskType == 6: + subChains[execute] = None #tag the sub chain to proceed down + + elif taskType == 2: # + sql = """SELECT description, chainAvailable from MicroServiceChainChoice Join MicroServiceChains ON MicroServiceChainChoice.chainAvailable = MicroServiceChains.pk WHERE choiceAvailableAtLink = %d;""" % (pk) + print sql + rows2 = databaseInterface.queryAllSQL(sql) + first = True + ifindent = indent + for row2 in rows2: + leadIn = "->[false]" + if first: + leadIn = "" + first = False + else: + writePlant( ifindent[:-1], "else") + writePlant( ifindent, leadIn, """if "select %s" then""" % (row2[0])) + ifindent = ifindent + " " + leadOut = "-->[true]" + jobChainTextGet(leadOut, row2[1], indent=ifindent+" ") + + elif taskType == 4: + writePlant( indent, leadIn, """ "Load Magic Link" """) + writePlant( indent, "-->[Load Magic Link] (*)") + + +def jobChainTextGet(leadIn, pk, indent=""): + sql = """SELECT startingLink, description FROM MicroServiceChains WHERE pk = '%s';""" % (pk.__str__()) + rows = databaseInterface.queryAllSQL(sql) + for row in rows: + startingLink = row[0] + description = row[1] + leadOut = "-->[" + description + " MicroServiceChain]" + writePlant( ("%s \"%s\"") % (leadIn, description + " MicroServiceChain") ) + jobChainLinkTextGet(indent, leadOut, startingLink) + + +def createWatchedDirectories(): + global processedJobChainLinks + sql = """SELECT watchedDirectoryPath, chain, expectedType FROM WatchedDirectories;""" + rows = databaseInterface.queryAllSQL(sql) + i = 1 + for row in rows: + watchedDirectoryPath = row[0] + chain = row[1] + expectedType = row[2] + writePlant( "@startuml WatchedDirectory-", 
watchedDirectoryPath.replace("%watchDirectoryPath%", "").replace("/", "_") + ".png" ) #img/activity_img10.png + writePlant( "title " + watchedDirectoryPath ) + jobChainTextGet("(*) --> [" + watchedDirectoryPath + "]" , chain) + writePlant( "@enduml" ) + i+=1 + +def createLoadMagic(): + global processedJobChainLinks + sql = """SELECT TasksConfigs.description, StandardTasksConfigs.execute FROM TasksConfigs JOIN StandardTasksConfigs ON TasksConfigs.taskTypePKReference = StandardTasksConfigs.pk WHERE TasksConfigs.taskType = 3;""" + rows = databaseInterface.queryAllSQL(sql) + i = 1 + for row in rows: + description = row[0] + chainLink = row[1] + processedJobChainLinks = [] + writePlant( "@startuml LoadMagicLink-", description, "-", chainLink ,".png" ) #img/activity_img10.png + writePlant( "title ", description, "-", chainLink ) + jobChainLinkTextGet("", "(*) --> [" + description + "]", int(chainLink), label = "") + writePlant( "@enduml" ) + i+=1 + +def createSubChains(): + global subChains + for chain in subChains.iterkeys(): + writePlant( "@startuml SubChain-", chain.__str__() + ".png" ) #img/activity_img10.png + writePlant( "title " + chain ) + jobChainTextGet("(*) --> [" + chain + "]" , chain) + writePlant( "@enduml" ) + + +if __name__ == '__main__': + createWatchedDirectories() + createLoadMagic() + createSubChains() \ No newline at end of file diff --git a/src/MCPServer/lib/plantUMLTextGenerator/plantUML b/src/MCPServer/lib/plantUMLTextGenerator/plantUML new file mode 100644 index 0000000000..33a6cf3570 --- /dev/null +++ b/src/MCPServer/lib/plantUMLTextGenerator/plantUML @@ -0,0 +1,235 @@ +@startumlimg/1.png +title %watchDirectoryPath%uploadDIP/ +(*) --> [%watchDirectoryPath%uploadDIP/] "-->[uploadDIP MicroServiceChain]" +-->[uploadDIP MicroServiceChain] "5. Upload DIP" + if "5. 
default" + -->[True] "End Of Chain" + endif +@enduml +@startumlimg/2.png +title %watchDirectoryPath%storeAIP/ +(*) --> [%watchDirectoryPath%storeAIP/] "-->[storeAIP MicroServiceChain]" +-->[storeAIP MicroServiceChain] "9. Store AIP" + if "9. default" + -->[True] "End Of Chain" + endif +@enduml +@startumlimg/3.png +title %watchDirectoryPath%approveNormalization/preservationAndAccess/ +(*) --> [%watchDirectoryPath%approveNormalization/preservationAndAccess/] "-->[approveNormalization MicroServiceChain]" +-->[approveNormalization MicroServiceChain] "35. Approve normalization" + if "35. default" + -->[True] "End Of Chain" + endif +@enduml +@startumlimg/4.png +title %watchDirectoryPath%approveNormalization/preservation/ +(*) --> [%watchDirectoryPath%approveNormalization/preservation/] "-->[approveNormalization MicroServiceChain]" +-->[approveNormalization MicroServiceChain] "67. Approve normalization" + if "67. default" + -->[True] "End Of Chain" + endif +@enduml +@startumlimg/5.png +title %watchDirectoryPath%workFlowDecisions/createDip/ +(*) --> [%watchDirectoryPath%workFlowDecisions/createDip/] "-->[Create DIP ? MicroServiceChain]" +-->[Create DIP ? MicroServiceChain] "74. Find options to normalize as" + if "74. default" + -->[true] "true 1. Move to the failed directory" + if "exitCodeIs 0" then + -->[true] "End Of Chain" + else + ->[false] if "1. default" then + -->[True] "End Of Chain" + endif + endif + endif +@enduml +@startumlimg/6.png +title %watchDirectoryPath%system/autoProcessSIP +(*) --> [%watchDirectoryPath%system/autoProcessSIP] "-->[SIP Creation complete MicroServiceChain]" +-->[SIP Creation complete MicroServiceChain] "89. Set file permissions" + if "exitCodeIs 0" then + -->[true] "88. Move to processing directory" + if "exitCodeIs 0" then + -->[true] "87. Verify SIP compliance" + if "exitCodeIs 0" then + -->[true] "85. Rename SIP directory with SIP UUID" + if "exitCodeIs 0" then + -->[true] "84. 
Include default SIP processingMCP.xml" + if "exitCodeIs 0" then + -->[true] "83. Remove thumbs.db files" + if "exitCodeIs 0" then + -->[true] "82. Set file permissions" + if "exitCodeIs 0" then + -->[true] "81. Sanitize object's file and directory names" + if "exitCodeIs 0" then + -->[true] "80. Sanitize SIP name" + if "exitCodeIs 0" then + -->[true] "79. Check for Service directory" + if "exitCodeIs 0" then + -->[true] "78. Check for Access directory" + if "exitCodeIs 0" then + -->[true] "76. Grant normalization options for no pre-existing DIP" + if "exitCodeIs 0" then + -->[true] "75. Move to workFlowDecisions-createDip directory" + if "exitCodeIs 0" then + -->[true] "End Of Chain" + else + ->[false] if "75. default" then + -->[True] "End Of Chain" + endif + endif + else + ->[false] if "76. default" then + -->[true] "true 1. Move to the failed directory" + endif + endif + else + ->[false]if "exitCodeIs 179" then + -->[true] "77. Grant normalization options for pre-existing DIP" + if "exitCodeIs 0" then + -->[true] "75. Move to workFlowDecisions-createDip directory" + else + ->[false] if "77. default" then + -->[true] "true 1. Move to the failed directory" + endif + endif + else + ->[false] if "78. default" then + -->[true] "true 1. Move to the failed directory" + endif + endif + endif + else + ->[false] if "79. default" then + -->[true] "true 78. Check for Access directory" + endif + endif + else + ->[false] if "80. default" then + -->[true] "true 1. Move to the failed directory" + endif + endif + else + ->[false] if "81. default" then + -->[true] "true 1. Move to the failed directory" + endif + endif + else + ->[false] if "82. default" then + -->[true] "true 1. Move to the failed directory" + endif + endif + else + ->[false] if "83. default" then + -->[true] "true 82. Set file permissions" + endif + endif + else + ->[false] if "84. default" then + -->[true] "true 1. Move to the failed directory" + endif + endif + else + ->[false] if "85. 
default" then + -->[true] "true 1. Move to the failed directory" + endif + endif + else + ->[false] if "87. default" then + -->[true] "true 86. Failed compliance. See output in dashboard. SIP moved back to SIPsUnderConstruction" + if "exitCodeIs 0" then + -->[true] "End Of Chain" + else + ->[false] if "86. default" then + -->[True] "End Of Chain" + endif + endif + endif + endif + else + ->[false] if "88. default" then + -->[true] "true 1. Move to the failed directory" + endif + endif + else + ->[false] if "89. default" then + -->[true] "true 1. Move to the failed directory" + endif + endif +@enduml +@startumlimg/7.png +title %watchDirectoryPath%SIPCreation/SIPsUnderConstruction +(*) --> [%watchDirectoryPath%SIPCreation/SIPsUnderConstruction] "-->[SIP Creation MicroServiceChain]" +-->[SIP Creation MicroServiceChain] "90. Approve SIP Creation" + if "90. default" + -->[True] "End Of Chain" + endif +@enduml +@startumlimg/8.png +title %watchDirectoryPath%SIPCreation/completedTransfers/ +(*) --> [%watchDirectoryPath%SIPCreation/completedTransfers/] "-->[Check transfer directory for objects MicroServiceChain]" +-->[Check transfer directory for objects MicroServiceChain] "101. Check transfer directory for objects" + if "exitCodeIs 0" then + -->[true] "End Of Chain" + else + ->[false]if "exitCodeIs 179" then + -->[true] "100. Create SIP(s)" + if "100. default" + -->[True] "End Of Chain" + endif + else + ->[false] if "101. default" then + -->[True] "End Of Chain" + endif + endif + endif +@enduml +@startumlimg/9.png +title %watchDirectoryPath%quarantined/ +(*) --> [%watchDirectoryPath%quarantined/] "-->[Unquarantine MicroServiceChain]" +-->[Unquarantine MicroServiceChain] "114. Find type to remove from quarantine as" + if "114. default" + -->[true] "true 91. Move to the failed directory" + if "exitCodeIs 0" then + -->[true] "End Of Chain" + else + ->[false] if "91. 
default" then + -->[True] "End Of Chain" + endif + endif + endif +@enduml +@startumlimg/10.png +title %watchDirectoryPath%workFlowDecisions/quarantineSIP/ +(*) --> [%watchDirectoryPath%workFlowDecisions/quarantineSIP/] "-->[quarantineSIP ? MicroServiceChain]" +-->[quarantineSIP ? MicroServiceChain] "120. Find type to process as" + if "120. default" + -->[true] "true 91. Move to the failed directory" + endif +@enduml +@startumlimg/11.png +title %watchDirectoryPath%workFlowDecisions/createTransferBackup/ +(*) --> [%watchDirectoryPath%workFlowDecisions/createTransferBackup/] "-->[create transfer backup ? MicroServiceChain]" +-->[create transfer backup ? MicroServiceChain] "132. Find type to process as" + if "132. default" + -->[true] "true 91. Move to the failed directory" + endif +@enduml +@startumlimg/12.png +title %watchDirectoryPath%activeTransfers/standardTransfer +(*) --> [%watchDirectoryPath%activeTransfers/standardTransfer] "-->[Transfers In progress MicroServiceChain]" +-->[Transfers In progress MicroServiceChain] "142. Approve transfer" + if "142. default" + -->[True] "End Of Chain" + endif +@enduml +@startumlimg/13.png +title %watchDirectoryPath%activeTransfers/Dspace1.7 +(*) --> [%watchDirectoryPath%activeTransfers/Dspace1.7] "-->[DSpace 1.7 Transfers In progress MicroServiceChain]" +-->[DSpace 1.7 Transfers In progress MicroServiceChain] "180. Approve DSpace 1.7 transfer" + if "180. default" + -->[True] "End Of Chain" + endif +@enduml diff --git a/src/MCPServer/lib/task.py b/src/MCPServer/lib/task.py new file mode 100755 index 0000000000..9a71ba83ea --- /dev/null +++ b/src/MCPServer/lib/task.py @@ -0,0 +1,23 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. 
<http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. + +# @package Archivematica +# @subpackage MCPServer +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ diff --git a/src/MCPServer/lib/taskStandard.py b/src/MCPServer/lib/taskStandard.py new file mode 100755 index 0000000000..964cbb4d6d --- /dev/null +++ b/src/MCPServer/lib/taskStandard.py @@ -0,0 +1,137 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. 
+ +# @package Archivematica +# @subpackage MCPServer +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ + +import uuid +import gearman +import cPickle +import datetime +import archivematicaMCP +import sys +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +from fileOperations import writeToFile + + +# ~Class Task~ +#Tasks are what are assigned to clients. +#They have a zero-many(tasks) TO one(job) relationship +#This relationship is formed by storing a pointer to it's owning job in its job variable. +#They use a "replacement dictionary" to define variables for this task. +#Variables used for the task are defined in the Job's configuration/module (The xml file) +class taskStandard(): + """A task is an instance of a command, operating on an entire directory, or a single file.""" + + def __init__(self, linkTaskManager, execute, arguments, standardOutputFile, standardErrorFile, outputLock=None, UUID=None): + if UUID == None: + UUID = uuid.uuid4().__str__() + self.UUID = UUID + self.linkTaskManager = linkTaskManager + self.execute = execute.encode( "utf-8" ) + #print >>sys.stderr, "EXECUTE: ", self.execute, type(self.execute), self.UUID, type(self.UUID) + self.arguments = arguments + self.standardOutputFile = standardOutputFile + self.standardErrorFile = standardErrorFile + self.outputLock = outputLock + + + print "init done" + + def performTask(self): + from archivematicaMCP import limitGearmanConnectionsSemaphore + limitGearmanConnectionsSemaphore.acquire() + gm_client = gearman.GearmanClient([archivematicaMCP.config.get('MCPServer', "MCPArchivematicaServer")]) + data = {"createdDate" : datetime.datetime.now().__str__()} + data["arguments"] = self.arguments + print '"'+self.execute+'"', data + completed_job_request = gm_client.submit_job(self.execute.lower(), cPickle.dumps(data), self.UUID) + limitGearmanConnectionsSemaphore.release() + self.check_request_status(completed_job_request) + print "DEBUG: FINISHED PERFORMING TASK: ", 
self.UUID + exit(0) + + def check_request_status(self, job_request): + if job_request.complete: + self.results = cPickle.loads(job_request.result) + print "Task %s finished! Result: %s - %s" % (job_request.job.unique, job_request.state, self.results) + self.writeOutputs() + self.linkTaskManager.taskCompletedCallBackFunction(self) + + elif job_request.timed_out: + print >>sys.stderr, "Task %s timed out!" % job_request.unique + self.results['exitCode'] = -1 + self.results["stdError"] = "Task %s timed out!" % job_request.unique + self.linkTaskManager.taskCompletedCallBackFunction(self) + + elif job_request.state == JOB_UNKNOWN: + print >>sys.stderr, "Task %s connection failed!" % job_request.unique + self.results["stdError"] = "Task %s connection failed!" % job_request.unique + self.results['exitCode'] = -1 + self.linkTaskManager.taskCompletedCallBackFunction(self) + + else: + print >>sys.stderr, "Task %s failed!" % job_request.unique + self.results["stdError"] = "Task %s failed!" % job_request.unique + self.results['exitCode'] = -1 + self.linkTaskManager.taskCompletedCallBackFunction(self) + + + + + + #This function is used to verify that where + #the MCP is writing to is an allowable location + #@fileName - full path of file it wants to validate. 
+ def writeOutputsValidateOutputFile(self, fileName): + ret = fileName + if ret: + if "%sharedPath%" in ret and "../" not in ret: + ret = ret.replace("%sharedPath%", archivematicaMCP.config.get('MCPServer', "sharedDirectory"), 1) + else: + ret = "<^Not allowed to write to file^> " + ret + return ret + + #Used to write the output of the commands to the specified files + def writeOutputs(self): + """Used to write the output of the commands to the specified files""" + + + if self.outputLock != None: + self.outputLock.acquire() + + standardOut = self.writeOutputsValidateOutputFile(self.standardOutputFile) + standardError = self.writeOutputsValidateOutputFile(self.standardErrorFile) + + #output , filename + a = writeToFile(self.results["stdOut"], standardOut) + b = writeToFile(self.results["stdError"], standardError) + + if self.outputLock != None: + self.outputLock.release() + + if a: + self.stdError = "Failed to write to file{" + standardOut + "}\r\n" + self.results["stdOut"] + if b: + self.stdError = "Failed to write to file{" + standardError + "}\r\n" + self.results["stdError"] + if self.results['exitCode']: + return self.results['exitCode'] + return a + b diff --git a/src/MCPServer/lib/transferD.py b/src/MCPServer/lib/transferD.py new file mode 100755 index 0000000000..3407fc3a48 --- /dev/null +++ b/src/MCPServer/lib/transferD.py @@ -0,0 +1,462 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
def timerExpired(event, utcDate):
    """Timer callback: a MOVED_FROM cookie was never claimed by a MOVED_TO,
    so treat the move as a removal from the system and log each affected
    file as removed.

    NOTE(review): movedFromCount is initialized as a plain int (0) at module
    level yet accessed here via .value (multiprocessing.Value style) -- this
    mismatch looks like a latent AttributeError; confirm which form is
    intended before changing either side.
    """
    global movedFromCount
    movedFromLock.acquire()
    if event.cookie not in movedFrom:
        # Already claimed by a MOVED_TO handler; nothing to do.
        movedFromLock.release()
        return
    # Unclaimed: drop the pending entry and record the removals.
    movedFromPath, filesMoved, timer = movedFrom.pop(event.cookie)
    movedFromCount.value = movedFromCount.value - 1
    movedFromLock.release()
    for fileUUID, oldLocation in filesMoved:
        fileWasRemoved(fileUUID, utcDate=utcDate, eventOutcomeDetailNote="removed from: " + oldLocation)
+class SIPWatch(pyinotify.ProcessEvent): + def __init__(self, unit, wm): + self.unit=unit + self.wm = wm + #if a file is moved in, look for a cookie to claim + #if there isn't one - error + #error. No adding files to a sip in this manner. + #else + #Update the file to be linked to this SIP + + #if the SIP is moved/removed + #??? + + #if a file is moved in, look for a cookie to claim + def process_IN_MOVED_TO(self, event): + global movedFromCount + t = threading.Thread(target=self.threaded_process_IN_MOVED_TO, args=(event,)) + t.daemon = True + t.start() + + def threaded_process_IN_MOVED_TO(self, event): + time.sleep(archivematicaMCP.config.getint('MCPServer', "waitToActOnMoves")) + #print event + #print "SIP IN_MOVED_TO" + movedFromLock.acquire() + if event.cookie not in movedFrom: + #if there isn't one - error + #print event.cookie, movedFrom + #print >>sys.stderr, "#error. No adding files to a sip in this manner." + movedFromLock.release() + return + + #remove it from the list of unfound moves + movedFromPath, filesMoved, timer = movedFrom.pop(event.cookie) + movedFromCount.value = movedFromCount.value - 1 + movedFromLock.release() + + movedToPath = os.path.join(event.path, event.name).replace(\ + self.unit.currentPath.replace("%sharedPath%", archivematicaMCP.config.get('MCPServer', "sharedDirectory"), 1), \ + "%SIPDirectory%", 1) + for fileUUID, oldLocation in filesMoved: + newFilePath = oldLocation.replace(movedFromPath, movedToPath, 1) + #print "Moved: ", oldLocation, "-> (" + self.unit.UUID + ")" + newFilePath + databaseInterface.runSQL("UPDATE Files " + \ + "SET currentLocation='" + newFilePath + "', " + \ + "Files.sipUUID = '" + self.unit.UUID + "' " \ + "WHERE fileUUID='" + fileUUID + "'" ) + + def process_IN_MOVED_FROM(self, event): + global movedFromCount + #print event + #print "SIP IN_MOVED_FROM" + #Wait for a moved to, and if one doesn't occur, consider it moved outside of the system. 
+ + #print "unit current path: ", self.unit.currentPath + movedFromPath = os.path.join(event.path, event.name).replace(\ + self.unit.currentPath.replace("%sharedPath%", archivematicaMCP.config.get('MCPServer', "sharedDirectory"), 1), \ + "%SIPDirectory%", 1) + filesMoved = [] + sql = """SELECT fileUUID, currentLocation FROM Files WHERE sipUUID = '""" + self.unit.UUID + "' AND removedTime = 0 AND currentLocation LIKE '" + MySQLdb.escape_string(movedFromPath).replace("%", "\%") + "%';" + c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + while row != None: + #print row + filesMoved.append(row) + row = c.fetchone() + sqlLock.release() + + movedFromLock.acquire() + utcDate = databaseInterface.getUTCDate() + timer = threading.Timer(archivematicaMCP.config.getint('MCPServer', "delayTimer"), timerExpired, args=[event, utcDate], kwargs={}) + movedFrom[event.cookie] = (movedFromPath, filesMoved, timer) + movedFromCount.value = movedFromCount.value + 1 + movedFromLock.release() + + #create timer to check if it's claimed by a move to + timer.start() + + + def process_IN_DELETE(self, event): + #print event + #print "SIP IN_DELETE" + #Wait for a moved to, and if one doesn't occur, consider it moved outside of the system. 
+ + movedFromPath = os.path.join(event.path, event.name).replace(\ + self.unit.currentPath.replace("%sharedPath%", archivematicaMCP.config.get('MCPServer', "sharedDirectory"), 1), \ + "%SIPDirectory%", 1) + filesMoved = [] + sql = """SELECT fileUUID, currentLocation FROM Files WHERE sipUUID = '""" + self.unit.UUID + "' AND removedTime = 0 AND currentLocation LIKE '" + MySQLdb.escape_string(movedFromPath).replace("%", "\%") + "%';" + c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + while row != None: + filesMoved.append(row) + row = c.fetchone() + sqlLock.release() + for fileUUID, currentLocation in filesMoved: + fileWasRemoved(fileUUID, eventOutcomeDetailNote = "removed from: " + currentLocation) + + if event.pathname + "/" == self.unit.currentPath.replace("%sharedPath%", archivematicaMCP.config.get('MCPServer', "sharedDirectory"), 1): + #print "stopped notifier for: ", self.unit.currentPath + self.notifier.stop() + + def process_IN_MOVE_SELF(self, event): + #print event + #print "SIP IN_MOVE_SELF" + path = event.pathname + wdrm = [event.wd] + if path.endswith("-unknown-path"): + recrm = path[:path.rfind("-unknown-path")] + "/" + else: + recrm = path + "/" + for key, watch in self.wm.watches.iteritems(): + if watch.path.startswith(recrm): + wdrm.append(watch.wd) + #print "Watch directory: ", event.wd, self.wm.get_path(event.wd) + #print "Removing watch directory: ", event.pathname + #wd = self.wm.get_wd(event.pathname) + rr = self.wm.rm_watch(wdrm, rec=False) + #print "rr: ", rr + #print self.wm + #self.notifier.stop() + if recrm == self.unit.currentPath.replace("%sharedPath%", archivematicaMCP.config.get('MCPServer', "sharedDirectory"), 1): + #print "stopped notifier for: ", self.unit.currentPath + self.notifier.stop() + + def process_IN_DELETE_SELF(self, event): + if event.pathname + "/" == self.unit.currentPath.replace("%sharedPath%", archivematicaMCP.config.get('MCPServer', "sharedDirectory"), 1): + #print "stopped notifier for: ", 
self.unit.currentPath + self.notifier.stop() + + + +class transferWatch(pyinotify.ProcessEvent): + def __init__(self, unit, wm): + self.unit=unit + self.wm = wm + + #when a file is moved out, create a cookie for the file, with the file uuid + #and a timer, so if it isn't claimed, the cookie is removed. + def process_IN_MOVED_FROM(self, event): + global movedFromCount + #print event + #print "Transfer IN_MOVED_FROM" + #Wait for a moved to, and if one doesn't occur, consider it moved outside of the system. + + + movedFromPath = os.path.join(event.path, event.name).replace(\ + self.unit.currentPath.replace("%sharedPath%", archivematicaMCP.config.get('MCPServer', "sharedDirectory"), 1), \ + "%transferDirectory%", 1) + filesMoved = [] + sql = """SELECT fileUUID, currentLocation FROM Files WHERE transferUUID = '""" + self.unit.UUID + "' AND removedTime = 0 AND currentLocation LIKE '" + MySQLdb.escape_string(movedFromPath).replace("%", "\%") + "%';" + c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + while row != None: + #print row + filesMoved.append(row) + row = c.fetchone() + sqlLock.release() + + movedFromLock.acquire() + utcDate = databaseInterface.getUTCDate() + timer = threading.Timer(archivematicaMCP.config.getint('MCPServer', "delayTimer"), timerExpired, args=[event, utcDate], kwargs={}) + movedFrom[event.cookie] = (movedFromPath, filesMoved, timer) + movedFromCount.value = movedFromCount.value + 1 + movedFromLock.release() + + #create timer to check if it's claimed by a move to + timer.start() + + #print "Watch directory: ", event.wd, wm.get_path(event.wd) + #if event.dir: + # print "Removing watch directory: ", event.pathname + # wd = wm.get_wd(event.pathname) + # wm.rm_watch(wd, rec=True) + + #if a file is moved in, look for a cookie to claim + def process_IN_MOVED_TO(self, event): + t = threading.Thread(target=self.threaded_process_IN_MOVED_TO, args=(event,)) + t.daemon = True + t.start() + + def threaded_process_IN_MOVED_TO(self, event): + 
global movedFromCount + time.sleep(archivematicaMCP.config.getint('MCPServer', "waitToActOnMoves")) + #print event + movedFromLock.acquire() + if event.cookie not in movedFrom: + #if there isn't one - error + print >>sys.stderr, "#error. No adding files to a sip in this manner." + movedFromLock.release() + return + + #remove it from the list of unfound moves + movedFromPath, filesMoved, timer = movedFrom.pop(event.cookie) + movedFromCount.value = movedFromCount.value - 1 + movedFromLock.release() + + movedToPath = os.path.join(event.path, event.name).replace(\ + self.unit.currentPath.replace("%sharedPath%", archivematicaMCP.config.get('MCPServer', "sharedDirectory"), 1), \ + "%transferDirectory%", 1) + for fileUUID, oldLocation in filesMoved: + newFilePath = oldLocation.replace(movedFromPath, movedToPath, 1) + #print "Moved: ", oldLocation, "-> (" + self.unit.UUID + ")" + newFilePath + #print "Todo - verify it belongs to this transfer" + #if it's from this transfer + #clear the SIP membership + #update current location + + databaseInterface.runSQL("UPDATE Files " + \ + "SET currentLocation='" + newFilePath + "', " + \ + "Files.sipUUID = NULL " + \ + "WHERE fileUUID='" + fileUUID + "'" ) + #else + #error ish - file doesn't belong here + #update current location & clear SIP + + #if the transfer is moved/removed + #??? + + def process_IN_DELETE(self, event): + #print event + #print "Transfer IN_DELETE" + #Wait for a moved to, and if one doesn't occur, consider it moved outside of the system. 
+ + movedFromPath = os.path.join(event.path, event.name).replace(\ + self.unit.currentPath.replace("%sharedPath%", archivematicaMCP.config.get('MCPServer', "sharedDirectory"), 1), \ + "%transferDirectory%", 1) + filesMoved = [] + sql = """SELECT fileUUID, currentLocation FROM Files WHERE transferUUID = '""" + self.unit.UUID + "' AND removedTime = 0 AND currentLocation LIKE '" + MySQLdb.escape_string(movedFromPath).replace("%", "\%") + "%';" + c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + while row != None: + filesMoved.append(row) + row = c.fetchone() + sqlLock.release() + for fileUUID, currentLocation in filesMoved: + fileWasRemoved(fileUUID, eventOutcomeDetailNote = "removed from: " + currentLocation) + + def process_IN_MOVE_SELF(self, event): + #print event + #print "Transfer IN_MOVE_SELF" + path = event.pathname + wdrm = [event.wd] + if path.endswith("-unknown-path"): + recrm = path[:path.rfind("-unknown-path")] + "/" + else: + recrm = path + "/" + for key, watch in self.wm.watches.iteritems(): + if watch.path.startswith(recrm): + wdrm.append(watch.wd) + #print "Watch directory: ", event.wd, self.wm.get_path(event.wd) + #print "Removing watch directory: ", event.pathname + #wd = self.wm.get_wd(event.pathname) + rr = self.wm.rm_watch(wdrm, rec=False) + #print "rr: ", rr + #print self.wm + + if recrm == self.unit.currentPath.replace("%sharedPath%", archivematicaMCP.config.get('MCPServer', "sharedDirectory"), 1): + #print "stopped notifier for: ", self.unit.currentPath + self.notifier.stop() + + def process_IN_DELETE_SELF(self, event): + if event.pathname + "/" == self.unit.currentPath.replace("%sharedPath%", archivematicaMCP.config.get('MCPServer', "sharedDirectory"), 1): + #print "stopped notifier for: ", self.unit.currentPath + self.notifier.stop() + + +def addWatchForTransfer(path, unit): + wm = pyinotify.WatchManager() + w = transferWatch(unit, wm) + notifier = pyinotify.ThreadedNotifier(wm, w) + w.notifier = notifier + 
wm.add_watch(path, mask, rec=True, auto_add=True) + notifier.start() + return notifier + +def addWatchForSIP(path, unit): + wm = pyinotify.WatchManager() + w = SIPWatch(unit, wm) + notifier = pyinotify.ThreadedNotifier(wm, w) + w.notifier = notifier + wm.add_watch(path, mask, rec=True, auto_add=True) + notifier.start() + return notifier + +def loadExistingFiles(): + #Transfers + directory = completedTransfersDirectory + if not os.path.isdir(directory): + os.makedirs(directory) + for item in os.listdir(directory): + if item == ".svn": + continue + path = os.path.join(directory, item) + if os.path.isdir(path): + path = path + "/" + unit = unitTransfer(path) + addWatchForTransfer(path, unit) + + #SIPS + directory = sipCreationDirectory + if not os.path.isdir(directory): + os.makedirs(directory) + for item in os.listdir(directory): + if item == ".svn": + continue + path = os.path.join(directory, item) + if os.path.isdir(path): + path = path + "/" + UUID = archivematicaMCP.findOrCreateSipInDB(path) + unit = unitSIP(path, UUID) + addWatchForSIP(path, unit) + + +class SIPCreationWatch(pyinotify.ProcessEvent): + "watches for new sips/completed transfers" + def __init__(self): + self.sips = {} + + def process_IN_CREATE(self, event): + self.process_IN_MOVED_TO(event) + + + def process_IN_MOVED_TO(self, event): + #time.sleep(archivematicaMCP.dbWaitSleep) #let db be updated by the microservice that moved it. 
+ #print event + #print "process_IN_MOVED_TO SIPCreationWatch" + path = os.path.join(event.path, event.name) + if not os.path.isdir(path): + print >>sys.stderr, "Bad path for watching - not a directory: ", path + return + if os.path.abspath(event.path) == os.path.abspath(completedTransfersDirectory): + path = path + "/" + unit = unitTransfer(path) + addWatchForTransfer(path, unit) + elif os.path.abspath(event.path) == os.path.abspath(sipCreationDirectory): + path = path + "/" + UUID = archivematicaMCP.findOrCreateSipInDB(path, waitSleep=0) + unit = unitSIP(path.replace(archivematicaMCP.config.get('MCPServer', "sharedDirectory"), "%sharedPath%", 1), UUID) + notifier = addWatchForSIP(path, unit) + self.sips[path[:-1]] = notifier + else: + print >>sys.stderr, "Bad path for watching: ", event.path + + + #def process_IN_MOVED_FROM(self, event): + # print event + # if event.pathname in self.sips: + # print "stopping watch on: ", event.name + # notifier = self.sips.pop(event.pathname) + # notifier.stop() + +def startWatching(): + wm = pyinotify.WatchManager() + notifier = pyinotify.ThreadedNotifier(wm, SIPCreationWatch()) + wm.add_watch(completedTransfersDirectory, mask, rec=False, auto_add=False) + wm.add_watch(sipCreationDirectory, mask, rec=False, auto_add=False) + notifier.start() + #notifier.loop() + +def main(): + loadExistingFiles() + startWatching() + +def debugMonitor(): + while True: + dblockstatus = "SQL Lock: Locked" + if databaseInterface.sqlLock.acquire(False): + databaseInterface.sqlLock.release() + dblockstatus = "SQL Lock: Unlocked" + #print "<DEBUG type=\"transferD\">", "\tDate Time: ", databaseInterface.getUTCDate(), "\tThreadCount: ", threading.activeCount(), "movedFromCount", movedFromCount.value, dblockstatus, "</DEBUG>" + time.sleep(10) + +def mainWithMovedFromCounter(movedFrom): + global movedFromCount + si = singleinstance("/tmp/archivematicaMCPTransferDPID") + if si.alreadyrunning(): + print >>sys.stderr, "Another instance is already running. 
Killing PID:", si.pid + si.kill() + movedFromCount = movedFrom + main() + databaseInterface.reconnect() + debugMonitor() + + +if __name__ == '__main__': + main() diff --git a/src/MCPServer/lib/unit.py b/src/MCPServer/lib/unit.py new file mode 100755 index 0000000000..cb7e73366d --- /dev/null +++ b/src/MCPServer/lib/unit.py @@ -0,0 +1,34 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. + +# @package Archivematica +# @subpackage MCPServer +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ + +class unit: + def __init__(self, currentPath, UUID): + self.currentPath = currentPath.__str__() + self.UUID = UUID + + def getMagicLink(self): + return + + def setMagicLink(self,link, exitStatus=""): + return diff --git a/src/MCPServer/lib/unitDIP.py b/src/MCPServer/lib/unitDIP.py new file mode 100755 index 0000000000..bac6e7e0d2 --- /dev/null +++ b/src/MCPServer/lib/unitDIP.py @@ -0,0 +1,124 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. 
<http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. + +# @package Archivematica +# @subpackage MCPServer +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ + +from unit import unit +from unitFile import unitFile +import archivematicaMCP +import os +import sys +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +import databaseInterface +import lxml.etree as etree + + +class unitDIP(unit): + + def __init__(self, currentPath, UUID): + self.currentPath = currentPath.__str__() + self.UUID = UUID + self.fileList = {} + self.owningUnit = None + + def reloadFileList(self): + self.fileList = {} + #os.walk(top[, topdown=True[, onerror=None[, followlinks=False]]]) + currentPath = self.currentPath.replace("%sharedPath%", \ + archivematicaMCP.config.get('MCPServer', "sharedDirectory"), 1) + "/" + for directory, subDirectories, files in os.walk(currentPath): + directory = directory.replace( currentPath, "%SIPDirectory%", 1) + for file in files: + filePath = os.path.join(directory, file) + #print filePath + self.fileList[filePath] = unitFile(filePath) + + sql = """SELECT fileUUID, currentLocation FROM Files WHERE sipUUID = '""" + self.UUID + "'" #AND Files.removedTime = 0; TODO + c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + while row != None: + #print row + UUID = row[0] + currentPath = row[1] + if 
currentPath in self.fileList: + self.fileList[currentPath].UUID = UUID + else: + print "todo: find deleted files/exclude" + print row[99]#fail + row = c.fetchone() + self.fileList[filePath].UUID = UUID + sqlLock.release() + + + + + def reload(self): + #sql = """SELECT * FROM SIPs WHERE sipUUID = '""" + self.UUID + "'" + #c, sqlLock = databaseInterface.querySQL(sql) + #row = c.fetchone() + #while row != None: + # print row + # #self.UUID = row[0] + # self.createdTime = row[1] + # self.currentPath = row[2] + # row = c.fetchone() + #sqlLock.release() + + #no-op for reload on DIP + return + + + def getReplacementDic(self, target): + # self.currentPath = currentPath.__str__() + # self.UUID = uuid.uuid4().__str__() + #Pre do some variables, that other variables rely on, because dictionaries don't maintain order + SIPUUID = self.UUID + if self.currentPath.endswith("/"): + SIPName = os.path.basename(self.currentPath[:-1]).replace("-" + SIPUUID, "") + else: + SIPName = os.path.basename(self.currentPath).replace("-" + SIPUUID, "") + SIPDirectory = self.currentPath.replace(archivematicaMCP.config.get('MCPServer', "sharedDirectory"), "%sharedPath%") + relativeDirectoryLocation = target.replace(archivematicaMCP.config.get('MCPServer', "sharedDirectory"), "%sharedPath%") + + + ret = { \ + "%SIPLogsDirectory%": SIPDirectory + "logs/", \ + "%SIPObjectsDirectory%": SIPDirectory + "objects/", \ + "%SIPDirectory%": SIPDirectory, \ + "%SIPDirectoryBasename%": os.path.basename(os.path.abspath(SIPDirectory)), \ + "%relativeLocation%": target.replace(self.currentPath, relativeDirectoryLocation, 1), \ + "%processingDirectory%": archivematicaMCP.config.get('MCPServer', "processingDirectory"), \ + "%checksumsNoExtention%":archivematicaMCP.config.get('MCPServer', "checksumsNoExtention"), \ + "%watchDirectoryPath%":archivematicaMCP.config.get('MCPServer', "watchDirectoryPath"), \ + "%rejectedDirectory%":archivematicaMCP.config.get('MCPServer', "rejectedDirectory"), \ + "%SIPUUID%":SIPUUID, \ 
+ "%SIPName%":SIPName \ + } + return ret + + def xmlify(self): + ret = etree.Element("unit") + etree.SubElement(ret, "type").text = "DIP" + unitXML = etree.SubElement(ret, "unitXML") + etree.SubElement(unitXML, "UUID").text = self.UUID + etree.SubElement(unitXML, "currentPath").text = self.currentPath.replace(archivematicaMCP.config.get('MCPServer', "sharedDirectory"), "%sharedPath%") + return ret diff --git a/src/MCPServer/lib/unitFile.py b/src/MCPServer/lib/unitFile.py new file mode 100755 index 0000000000..8c9409008a --- /dev/null +++ b/src/MCPServer/lib/unitFile.py @@ -0,0 +1,57 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. 
+ +# @package Archivematica +# @subpackage MCPServer +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ + +from unit import unit + +class unitFile(unit): + """For objects representing a File""" + def __init__(self, currentPath, UUID="None", owningUnit=None): + self.currentPath = currentPath + self.UUID = UUID + self.owningUnit = owningUnit + self.fileGrpUse = 'None' + self.fileList={currentPath:self} + self.pathString = "" + if owningUnit: + self.pathString = owningUnit.pathString + + def getReplacementDic(self, target=None): + if target != None and self.owningUnit: + return self.owningUnit.getReplacementDic(self.owningUnit.currentPath) + # self.currentPath = currentPath.__str__() + # self.UUID = uuid.uuid4().__str__() + #Pre do some variables, that other variables rely on, because dictionaries don't maintain order + else: + ret = {\ + "%relativeLocation%": self.currentPath, \ + "%fileUUID%": self.UUID, \ + "%fileGrpUse%": self.fileGrpUse + } + return ret + + def reload(self): + return + + def reloadFileList(self): + return diff --git a/src/MCPServer/lib/unitSIP.py b/src/MCPServer/lib/unitSIP.py new file mode 100755 index 0000000000..78e9ec6df3 --- /dev/null +++ b/src/MCPServer/lib/unitSIP.py @@ -0,0 +1,148 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. + +# @package Archivematica +# @subpackage MCPServer +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ + +import uuid +from unit import unit +from unitFile import unitFile +import archivematicaMCP +import os +import sys +import lxml.etree as etree +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +import databaseInterface +from databaseFunctions import insertIntoEvents +from databaseFunctions import deUnicode + + + +class unitSIP(unit): + + def __init__(self, currentPath, UUID): + self.currentPath = currentPath.__str__() + self.UUID = UUID + self.fileList = {} + self.pathString = "%SIPDirectory%" + self.owningUnit = None + + def reloadFileList(self): + self.fileList = {} + #os.walk(top[, topdown=True[, onerror=None[, followlinks=False]]]) + currentPath = self.currentPath.replace("%sharedPath%", \ + archivematicaMCP.config.get('MCPServer', "sharedDirectory"), 1) + "/" + for directory, subDirectories, files in os.walk(currentPath): + directory = directory.replace( currentPath, "%SIPDirectory%", 1) + for file in files: + if directory != "%SIPDirectory%": + filePath = os.path.join(directory, file) + else: + filePath = directory + file + #print filePath + self.fileList[filePath] = unitFile(filePath, owningUnit=self) + + sql = """SELECT fileUUID, currentLocation, fileGrpUse FROM Files WHERE removedTime = 0 AND sipUUID = '""" + self.UUID + "'" + c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + while row != None: + #print row + UUID = row[0] + currentPath = row[1] + fileGrpUse = row[2] + if currentPath in self.fileList: + self.fileList[currentPath].UUID = UUID + self.fileList[currentPath].fileGrpUse = fileGrpUse + else: + print >>sys.stderr, self.fileList + eventDetail = "SIP {" + self.UUID + "} has file {" + UUID + "}\"" + currentPath + "\" in the database, but file doesn't 
exist in the file system." + print >>sys.stderr, "!!!", eventDetail, "!!!" + row = c.fetchone() + sqlLock.release() + + def setMagicLink(self,link, exitStatus=""): + if exitStatus != "": + sql = """UPDATE SIPs SET magicLink='""" + link + """', magicLinkExitMessage='""" + exitStatus + """' WHERE sipUUID='""" + self.UUID + """';""" + else: + sql = """UPDATE SIPs SET magicLink='""" + link + """' WHERE sipUUID='""" + self.UUID + """';""" + databaseInterface.runSQL(sql) + + def getMagicLink(self): + ret = None + sql = """SELECT magicLink, magicLinkExitMessage FROM SIPs WHERE sipUUID = '""" + self.UUID + "'" + c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + while row != None: + print row + ret = row + row = c.fetchone() + sqlLock.release() + return ret + + + def reload(self): + sql = """SELECT * FROM SIPs WHERE sipUUID = '""" + self.UUID + "'" + c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + while row != None: + print row + #self.UUID = row[0] + self.createdTime = deUnicode(row[1]) + self.currentPath = deUnicode(row[2]) + row = c.fetchone() + sqlLock.release() + + + def getReplacementDic(self, target): + # self.currentPath = currentPath.__str__() + # self.UUID = uuid.uuid4().__str__() + #Pre do some variables, that other variables rely on, because dictionaries don't maintain order + SIPUUID = self.UUID + if self.currentPath.endswith("/"): + SIPName = os.path.basename(self.currentPath[:-1]).replace("-" + SIPUUID, "") + else: + SIPName = os.path.basename(self.currentPath).replace("-" + SIPUUID, "") + SIPDirectory = self.currentPath.replace(archivematicaMCP.config.get('MCPServer', "sharedDirectory"), "%sharedPath%") + relativeDirectoryLocation = target.replace(archivematicaMCP.config.get('MCPServer', "sharedDirectory"), "%sharedPath%") + + + ret = { \ + "%SIPLogsDirectory%": SIPDirectory + "logs/", \ + "%SIPObjectsDirectory%": SIPDirectory + "objects/", \ + "%SIPDirectory%": SIPDirectory, \ + "%SIPDirectoryBasename%": 
os.path.basename(os.path.abspath(SIPDirectory)), \ + "%relativeLocation%": target.replace(self.currentPath, relativeDirectoryLocation, 1), \ + "%processingDirectory%": archivematicaMCP.config.get('MCPServer', "processingDirectory"), \ + "%checksumsNoExtention%":archivematicaMCP.config.get('MCPServer', "checksumsNoExtention"), \ + "%watchDirectoryPath%":archivematicaMCP.config.get('MCPServer', "watchDirectoryPath"), \ + "%rejectedDirectory%":archivematicaMCP.config.get('MCPServer', "rejectedDirectory"), \ + "%SIPUUID%":SIPUUID, \ + "%SIPName%":SIPName \ + } + return ret + + def xmlify(self): + ret = etree.Element("unit") + etree.SubElement(ret, "type").text = "SIP" + unitXML = etree.SubElement(ret, "unitXML") + etree.SubElement(unitXML, "UUID").text = self.UUID + etree.SubElement(unitXML, "currentPath").text = self.currentPath.replace(archivematicaMCP.config.get('MCPServer', "sharedDirectory"), "%sharedPath%") + return ret diff --git a/src/MCPServer/lib/unitTransfer.py b/src/MCPServer/lib/unitTransfer.py new file mode 100755 index 0000000000..103a3ea6fd --- /dev/null +++ b/src/MCPServer/lib/unitTransfer.py @@ -0,0 +1,198 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. 
+ +# @package Archivematica +# @subpackage MCPServer +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ + +from unit import unit +from unitFile import unitFile +import uuid +import archivematicaMCP +import os +import time +import sys +import traceback +import pyinotify +import threading +import shutil +import MySQLdb +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +import databaseInterface +import lxml.etree as etree +from fileOperations import renameAsSudo +from databaseFunctions import insertIntoEvents +from databaseFunctions import deUnicode + +class unitTransfer(unit): + def __init__(self, currentPath, UUID=""): + self.owningUnit = None + #Just Use the end of the directory name + self.pathString = "%transferDirectory%" + currentPath2 = currentPath.replace(archivematicaMCP.config.get('MCPServer', "sharedDirectory"), \ + "%sharedPath%", 1) + + if UUID == "": + sql = """SELECT transferUUID FROM Transfers WHERE currentLocation = '""" + MySQLdb.escape_string(currentPath2) + "'" + time.sleep(.5) + c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + while row != None: + UUID = row[0] + print "Opening existing Transfer:", UUID, "-", currentPath2 + row = c.fetchone() + sqlLock.release() + + if UUID == "": + uuidLen = -36 + if archivematicaMCP.isUUID(currentPath[uuidLen-1:-1]): + UUID = currentPath[uuidLen-1:-1] + else: + UUID = uuid.uuid4().__str__() + self.UUID = UUID + sql = """INSERT INTO Transfers (transferUUID, currentLocation) + VALUES ('""" + UUID + databaseInterface.separator + MySQLdb.escape_string(currentPath2) + "');" + databaseInterface.runSQL(sql) + + self.currentPath = currentPath2 + self.UUID = UUID + self.fileList = {} + + + def reloadFileList(self): + print "DEBUG reloading transfer file list: ", self.UUID + self.fileList = {} + #os.walk(top[, topdown=True[, onerror=None[, followlinks=False]]]) + currentPath = self.currentPath.replace("%sharedPath%", \ + archivematicaMCP.config.get('MCPServer', 
"sharedDirectory"), 1) + "/" + #print "currentPath: ", currentPath, type(currentPath) + try: + #print currentPath, type(currentPath) + for directory, subDirectories, files in os.walk(currentPath): + directory = directory.replace( currentPath, "%transferDirectory%", 1) + for file in files: + if "%transferDirectory%" != directory: + filePath = os.path.join(directory, file) + else: + filePath = directory + file + self.fileList[filePath] = unitFile(filePath, owningUnit=self) + + sql = """SELECT fileUUID, currentLocation, fileGrpUse FROM Files WHERE removedTime = 0 AND transferUUID = '""" + self.UUID + "'" + #print sql + c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + print self.fileList + while row != None: + #print row + UUID = row[0] + currentPath = row[1].encode("utf-8") + fileGrpUse = row[2] + #print currentPath in self.fileList, row + if currentPath in self.fileList: + self.fileList[currentPath].UUID = UUID + self.fileList[currentPath].fileGrpUse = fileGrpUse + else: + print >>sys.stderr, "!!!", "Transfer {" + self.UUID + "} has file {" + UUID + "}\"", currentPath, "\" in the database, but file doesn't exist in the file system.", "!!!" 
+ row = c.fetchone() + sqlLock.release() + + except Exception as inst: + traceback.print_exc(file=sys.stdout) + print type(inst) + print inst.args + exit(1) + + def updateLocation(self, newLocation): + self.currentPath = newLocation + sql = """UPDATE Transfers SET currentPath='""" + newLocation + """' WHERE transferUUID='""" + self.UUID + """';""" + databaseInterface.runSQL(sql) + + def setMagicLink(self,link, exitStatus=""): + if exitStatus != "": + sql = """UPDATE Transfers SET magicLink='""" + link + """', magicLinkExitMessage='""" + exitStatus + """' WHERE transferUUID='""" + self.UUID + """';""" + else: + sql = """UPDATE Transfers SET magicLink='""" + link + """' WHERE transferUUID='""" + self.UUID + """';""" + databaseInterface.runSQL(sql) + + def getMagicLink(self): + ret = None + sql = """SELECT magicLink, magicLinkExitMessage FROM Transfers WHERE transferUUID = '""" + self.UUID + "'" + c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + while row != None: + print row + ret = row + row = c.fetchone() + sqlLock.release() + return ret + + + def reload(self): + sql = """SELECT transferUUID, currentLocation FROM Transfers WHERE transferUUID = '""" + self.UUID + "'" + c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + while row != None: + self.UUID = deUnicode(row[0]) + #self.createdTime = row[1] + self.currentPath = deUnicode(row[1]) + row = c.fetchone() + sqlLock.release() + return + + + def getReplacementDic(self, target): + # self.currentPath = currentPath.__str__() + # self.UUID = uuid.uuid4().__str__() + #Pre do some variables, that other variables rely on, because dictionaries don't maintain order + SIPUUID = self.UUID + if self.currentPath.endswith("/"): + SIPName = os.path.basename(self.currentPath[:-1]).replace("-" + SIPUUID, "") + else: + SIPName = os.path.basename(self.currentPath).replace("-" + SIPUUID, "") + SIPDirectory = self.currentPath.replace(archivematicaMCP.config.get('MCPServer', "sharedDirectory"), 
"%sharedPath%") + relativeDirectoryLocation = target.replace(archivematicaMCP.config.get('MCPServer', "sharedDirectory"), "%sharedPath%") + + + ret = { \ + "%SIPLogsDirectory%": SIPDirectory + "logs/", \ + "%SIPObjectsDirectory%": SIPDirectory + "objects/", \ + "%SIPDirectory%": SIPDirectory, \ + "%transferDirectory%": SIPDirectory, \ + "%SIPDirectoryBasename%": os.path.basename(os.path.abspath(SIPDirectory)), \ + "%relativeLocation%": target.replace(self.currentPath, relativeDirectoryLocation, 1), \ + "%processingDirectory%": archivematicaMCP.config.get('MCPServer', "processingDirectory"), \ + "%checksumsNoExtention%":archivematicaMCP.config.get('MCPServer', "checksumsNoExtention"), \ + "%watchDirectoryPath%":archivematicaMCP.config.get('MCPServer', "watchDirectoryPath"), \ + "%rejectedDirectory%":archivematicaMCP.config.get('MCPServer', "rejectedDirectory"), \ + "%SIPUUID%":SIPUUID, \ + "%SIPName%":SIPName \ + } + return ret + + def xmlify(self): + ret = etree.Element("unit") + etree.SubElement(ret, "type").text = "Transfer" + unitXML = etree.SubElement(ret, "unitXML") + etree.SubElement(unitXML, "UUID").text = self.UUID + tempPath = self.currentPath.replace(archivematicaMCP.config.get('MCPServer', "sharedDirectory"), "%sharedPath%").decode("utf-8") + etree.SubElement(unitXML, "currentPath").text = tempPath + + return ret + diff --git a/src/MCPServer/lib/watchDirectory.py b/src/MCPServer/lib/watchDirectory.py new file mode 100755 index 0000000000..2006897e90 --- /dev/null +++ b/src/MCPServer/lib/watchDirectory.py @@ -0,0 +1,115 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. + +# @package Archivematica +# @subpackage MCPServer +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ +# @thanks to http://timgolden.me.uk/python/win32_how_do_i/watch_directory_for_changes.html +import os +import time +import threading +import sys +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +from archivematicaFunctions import unicodeToStr + +from archivematicaMCP import debug +DEBUG = debug + +class archivematicaWatchDirectory: + """Watches for new files/directories.""" + def __init__(self, directory, + variablesAdded=None, + callBackFunctionAdded=None, + variablesRemoved=None, + callBackFunctionRemoved=None, + alertOnDirectories=True, + alertOnFiles=True, + interval=1, + threaded=True): + self.run = False + self.variablesAdded = variablesAdded + self.callBackFunctionAdded = callBackFunctionAdded + self.variablesRemoved = variablesRemoved + self.callBackFunctionRemoved = callBackFunctionRemoved + self.directory = directory + self.alertOnDirectories = alertOnDirectories + self.alertOnFiles = alertOnFiles + self.interval= interval + + if not os.path.isdir(directory): + os.makedirs(directory) + + if threaded: + t = threading.Thread(target=self.start) + t.daemon = True + t.start() + else: + self.start() + + def start(self): + """Based on polling example: http://timgolden.me.uk/python/win32_how_do_i/watch_directory_for_changes.html""" + self.run = True + if DEBUG: + print "watching directory: ", self.directory + before = dict ([(f, None) for f in os.listdir (self.directory)]) + while self.run: + time.sleep (self.interval) + after = dict ([(f, 
None) for f in os.listdir (self.directory)]) + added = [f for f in after if not f in before] + removed = [f for f in before if not f in after] + if added: + if DEBUG: + print "Added: ", ", ".join (added) + for i in added: + i = unicodeToStr(i) + directory = unicodeToStr(self.directory) + self.event(os.path.join(directory, i), self.variablesAdded, self.callBackFunctionAdded) + if removed: + if DEBUG: + print "Removed: ", ", ".join (removed) + for i in removed: + i = unicodeToStr(i) + directory = unicodeToStr(self.directory) + self.event(os.path.join(directory, i), self.variablesRemoved, self.callBackFunctionRemoved) + before = after + + def event(self, path, variables, function): + if not function: + return + if os.path.isdir(path) and self.alertOnDirectories: + function(path, variables) + if os.path.isfile(path) and self.alertOnFiles: + function(path, variables) + + def stop(self): + self.run = False + +def testCallBackFunction(path, variables): + print path, variables + + +if __name__ == '__main__': + print "example use" + directory = "/tmp/" + #directory = "." + variablesOnAdded = {"something":"yes", "nothing":"no"} + archivematicaWatchDirectory(directory, threaded=False, variablesAdded=variablesOnAdded, callBackFunctionAdded=testCallBackFunction, callBackFunctionRemoved=testCallBackFunction) + + \ No newline at end of file diff --git a/src/MCPServer/share/mysql b/src/MCPServer/share/mysql new file mode 100755 index 0000000000..d3b93eae7a --- /dev/null +++ b/src/MCPServer/share/mysql @@ -0,0 +1,5051 @@ +/* +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. + +# @package Archivematica +# @subpackage MCPServer +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ +*/ + +DROP TABLE IF EXISTS Accesses; +CREATE TABLE Accesses ( + pk INT UNSIGNED PRIMARY KEY AUTO_INCREMENT, + SIPUUID VARCHAR(50), + target LONGTEXT, + resource LONGTEXT, + status LONGTEXT, + statusCode TINYINT UNSIGNED, + exitCode TINYINT UNSIGNED, + createdTime TIMESTAMP DEFAULT NOW(), + updatedTime TIMESTAMP +) DEFAULT CHARSET=utf8; + +DROP TABLE IF EXISTS Agents; +CREATE TABLE Agents ( + pk INT UNSIGNED PRIMARY KEY AUTO_INCREMENT, + agentIdentifierType LONGTEXT, + agentIdentifierValue LONGTEXT, + agentName LONGTEXT, + agentType LONGTEXT +) DEFAULT CHARSET=utf8; + +SET @archivematicaAgent = 1; +SET @organizationAgent = 2; +INSERT INTO Agents (pk, agentIdentifierType, agentIdentifierValue, agentName, agentType) + VALUES (@archivematicaAgent, 'preservation system', 'Archivematica-0.9', 'Archivematica', 'software'), + (@organizationAgent, 'repository code', 'ORG', 'Your Organization Name Here', 'organization') +; + + +DROP TABLE IF EXISTS MetadataAppliesToTypes; +CREATE TABLE MetadataAppliesToTypes ( + pk INT PRIMARY KEY, + description varchar(50) +) DEFAULT CHARSET=utf8; + +SET @SIPMetadataAppliesToType = 1; +SET @TransferMetadataAppliesToType = 2; +SET @FileMetadataAppliesToType = 3; +INSERT INTO MetadataAppliesToTypes (pk, description) + VALUES ( @SIPMetadataAppliesToType, 'SIP'), + ( @FileMetadataAppliesToType, 'File'), + ( @TransferMetadataAppliesToType, 'Transfer' ) +; + + +DROP TABLE IF EXISTS Dublincore; +CREATE TABLE Dublincore ( + pk INT UNSIGNED 
PRIMARY KEY AUTO_INCREMENT, + metadataAppliesToType INT, + Foreign Key (metadataAppliesToType) references MetadataAppliesToTypes(pk), + metadataAppliesToidentifier VARCHAR(50), + title LONGTEXT, + creator LONGTEXT, + subject LONGTEXT, + description LONGTEXT, + publisher LONGTEXT, + contributor LONGTEXT, + date LONGTEXT, + type LONGTEXT, + format LONGTEXT, + identifier LONGTEXT, + source LONGTEXT, + relation LONGTEXT, + language LONGTEXT, + coverage LONGTEXT, + rights LONGTEXT +) DEFAULT CHARSET=utf8; + + +-- rightsStatement (O, R) +DROP TABLE IF EXISTS RightsStatement; +CREATE TABLE RightsStatement ( + pk INT UNSIGNED PRIMARY KEY AUTO_INCREMENT, + metadataAppliesToType INT, + Foreign Key (metadataAppliesToType) references MetadataAppliesToTypes(pk), + metadataAppliesToidentifier VARCHAR(50), + rightsStatementIdentifierType LONGTEXT NOT NULL, + rightsStatementIdentifierValue LONGTEXT NOT NULL, + fkAgent int(10) UNSIGNED DEFAULT 0 NOT NULL, + rightsBasis LONGTEXT NOT NULL +) DEFAULT CHARSET=utf8; +ALTER TABLE RightsStatement ADD INDEX fkAgent (fkAgent); + +-- 4.1.3 copyrightInformation (O, NR) +DROP TABLE IF EXISTS RightsStatementCopyright; +CREATE TABLE RightsStatementCopyright( + pk INT(10) unsigned NOT NULL AUTO_INCREMENT, + fkRightsStatement INT(10) unsigned DEFAULT NULL, + copyrightStatus LONGTEXT NOT NULL, + copyrightJurisdiction LONGTEXT NOT NULL, + copyrightStatusDeterminationDate LONGTEXT NOT NULL, + copyrightApplicableStartDate LONGTEXT NOT NULL, + copyrightApplicableEndDate LONGTEXT NOT NULL, + copyrightApplicableEndDateOpen BOOLEAN NOT NULL DEFAULT 0, + PRIMARY KEY (pk), + KEY fkRightsStatement (fkRightsStatement) +) DEFAULT CHARSET=utf8; + +-- 4.1.3.4 copyrightNote (O, R) +DROP TABLE IF EXISTS RightsStatementCopyrightNote; +CREATE TABLE RightsStatementCopyrightNote ( + pk INT UNSIGNED PRIMARY KEY AUTO_INCREMENT, + fkRightsStatementCopyrightInformation INT UNSIGNED, + Foreign Key (fkRightsStatementCopyrightInformation) references 
RightsStatementCopyright(pk), + copyrightNote LONGTEXT NOT NULL +) DEFAULT CHARSET=utf8; + +-- 4.1.3.5 copyrightDocumentationIdentifier (O, R) +DROP TABLE IF EXISTS RightsStatementCopyrightDocumentationIdentifier; +CREATE TABLE RightsStatementCopyrightDocumentationIdentifier ( + pk INT UNSIGNED PRIMARY KEY AUTO_INCREMENT, + fkRightsStatementCopyrightInformation INT UNSIGNED, + copyrightDocumentationIdentifierType longtext NOT NULL, + copyrightDocumentationIdentifierValue longtext NOT NULL, + copyrightDocumentationIdentifierRole longtext NOT NULL, + Foreign Key (fkRightsStatementCopyrightInformation) references RightsStatementCopyright(pk) +) DEFAULT CHARSET=utf8; + +-- 4.1.4 licenseInformation (O, NR) +DROP TABLE IF EXISTS RightsStatementLicense; +CREATE TABLE RightsStatementLicense( + pk INT(10) unsigned NOT NULL AUTO_INCREMENT, + fkRightsStatement INT(10) unsigned DEFAULT NULL, + licenseTerms longtext, + licenseApplicableStartDate LONGTEXT NOT NULL, + licenseApplicableEndDate LONGTEXT NOT NULL, + licenseApplicableEndDateOpen BOOLEAN NOT NULL DEFAULT 0, + PRIMARY KEY (pk), + KEY fkRightsStatement (fkRightsStatement) +) DEFAULT CHARSET=utf8; + +-- 4.1.4.1 licenseDocumentationIdentifier (O, R) +DROP TABLE IF EXISTS RightsStatementLicenseDocumentationIdentifier; +CREATE TABLE RightsStatementLicenseDocumentationIdentifier ( + pk INT UNSIGNED PRIMARY KEY AUTO_INCREMENT, + fkRightsStatementLicense INT UNSIGNED, + licenseDocumentationIdentifierType longtext NOT NULL, + licenseDocumentationIdentifierValue longtext NOT NULL, + licenseDocumentationIdentifierRole longtext NOT NULL, + Foreign Key (fkRightsStatementLicense) references RightsStatementLicense(pk) +) DEFAULT CHARSET=utf8; + +-- 4.1.4.3 licenseNote (O, R) +DROP TABLE IF EXISTS RightsStatementLicenseNote; +CREATE TABLE RightsStatementLicenseNote ( + pk INT UNSIGNED PRIMARY KEY AUTO_INCREMENT, + fkRightsStatementLicense INT UNSIGNED, + Foreign Key (fkRightsStatementLicense) references RightsStatementLicense(pk), + 
licenseNote LONGTEXT NOT NULL +) DEFAULT CHARSET=utf8; + +DROP TABLE IF EXISTS ArchivematicaRightsStatement; +CREATE TABLE ArchivematicaRightsStatement ( + pk INT UNSIGNED PRIMARY KEY AUTO_INCREMENT, + metadataAppliesToType INT, + Foreign Key (metadataAppliesToType) references MetadataAppliesToTypes(pk), + metadataAppliesToidentifier VARCHAR(50), + fkRightsStatement INT UNSIGNED, + Foreign Key (fkRightsStatement) references RightsStatement(pk) +) DEFAULT CHARSET=utf8; + +-- rightsExtension (O, R) ??? -- + +-- 4.1.5 statuteInformation (O, R) +DROP TABLE IF EXISTS RightsStatementStatuteInformation; +CREATE TABLE RightsStatementStatuteInformation ( + pk INT UNSIGNED PRIMARY KEY AUTO_INCREMENT, + fkRightsStatement INT UNSIGNED, + Foreign Key (fkRightsStatement) references RightsStatement(pk), + statuteJurisdiction LONGTEXT NOT NULL, + statuteCitation LONGTEXT NOT NULL, + statuteInformationDeterminationDate LONGTEXT, + statuteApplicableStartDate LONGTEXT NOT NULL, + statuteApplicableEndDate LONGTEXT NOT NULL, + statuteApplicableEndDateOpen BOOLEAN NOT NULL DEFAULT 0 +) DEFAULT CHARSET=utf8; + +-- 4.1.5.4 statuteNote (O, R) +DROP TABLE IF EXISTS RightsStatementStatuteInformationNote; +CREATE TABLE RightsStatementStatuteInformationNote ( + pk int(10) unsigned NOT NULL AUTO_INCREMENT, + fkRightsStatementStatuteInformation int(10) unsigned DEFAULT NULL, + statuteNote longtext NOT NULL, + PRIMARY KEY (pk), + Foreign Key (fkRightsStatementStatuteInformation) references RightsStatementStatuteInformation(pk) +) DEFAULT CHARSET=utf8; + +-- 4.1.5.5 statuteDocumentationIdentifier (O, R) +DROP TABLE IF EXISTS RightsStatementStatuteDocumentationIdentifier; +CREATE TABLE RightsStatementStatuteDocumentationIdentifier ( + pk INT UNSIGNED PRIMARY KEY AUTO_INCREMENT, + fkRightsStatementStatuteInformation INT UNSIGNED, + statuteDocumentationIdentifierType longtext NOT NULL, + statuteDocumentationIdentifierValue longtext NOT NULL, + statuteDocumentationIdentifierRole longtext NOT NULL, + 
Foreign Key (fkRightsStatementStatuteInformation) references RightsStatementStatuteInformation(pk) +) DEFAULT CHARSET=utf8; + +/* +USING - ArchivematicaRightsStatement TABLE +-- 4.1.7 linkingObjectIdentifier (O, R) +DROP TABLE IF EXISTS RightsStatementLinkingObjectIdentifier; +CREATE TABLE RightsStatementLinkingObjectIdentifier ( + pk INT UNSIGNED PRIMARY KEY AUTO_INCREMENT, + fkRightsStatement INT UNSIGNED, + Foreign Key (fkRightsStatement) references RightsStatement(pk), + linkingObjectIdentifierType LONGTEXT NOT NULL, + linkingObjectIdentifierValue LONGTEXT NOT NULL +) DEFAULT CHARSET=utf8; + +-- 4.1.7.? LinkingObjectRole (O, R) +DROP TABLE IF EXISTS RightsStatementLinkingObjectIdentifierLinkingObjectRole; +CREATE TABLE RightsStatementLinkingObjectIdentifierLinkingObjectRole ( + pk INT UNSIGNED PRIMARY KEY AUTO_INCREMENT, + fkRightsStatementLinkingObjectIdentifier INT UNSIGNED, + Foreign Key (fkRightsStatementLinkingObjectIdentifier) references RightsStatementLinkingObjectIdentifier(pk), + LinkingObjectRole LONGTEXT NOT NULL +) DEFAULT CHARSET=utf8; +*/ + +-- 4.1.6 otherRightsInformation (O, NR) +DROP TABLE IF EXISTS RightsStatementOtherRightsInformation; +CREATE TABLE RightsStatementOtherRightsInformation ( + pk INT UNSIGNED PRIMARY KEY AUTO_INCREMENT, + fkRightsStatement INT UNSIGNED, + Foreign Key (fkRightsStatement) references RightsStatement(pk), + otherRightsBasis LONGTEXT NOT NULL, + otherRightsApplicableStartDate LONGTEXT NOT NULL, + otherRightsApplicableEndDate LONGTEXT NOT NULL, + otherRightsApplicableEndDateOpen BOOLEAN NOT NULL DEFAULT 0 +) DEFAULT CHARSET=utf8; + +-- 4.1.6.1 otherRightsDocumentationIdentifier (O, R) +DROP TABLE IF EXISTS RightsStatementOtherRightsDocumentationIdentifier; +CREATE TABLE RightsStatementOtherRightsDocumentationIdentifier ( + pk INT UNSIGNED PRIMARY KEY AUTO_INCREMENT, + fkRightsStatementOtherRightsInformation INT UNSIGNED, + otherRightsDocumentationIdentifierType longtext NOT NULL, + 
otherRightsDocumentationIdentifierValue longtext NOT NULL, + otherRightsDocumentationIdentifierRole longtext NOT NULL, + Foreign Key (fkRightsStatementOtherRightsInformation) references RightsStatementOtherRightsInformation(pk) +) DEFAULT CHARSET=utf8; + +-- 4.1.6.4 otherRightsNote (O, R) +DROP TABLE IF EXISTS RightsStatementOtherRightsNote; +CREATE TABLE RightsStatementOtherRightsNote ( + pk int(10) unsigned NOT NULL AUTO_INCREMENT, + fkRightsStatementOtherRightsInformation int(10) unsigned DEFAULT NULL, + otherRightsNote longtext NOT NULL, + PRIMARY KEY (pk), + Foreign Key (fkRightsStatementOtherRightsInformation) references RightsStatementOtherRightsInformation(pk) +) DEFAULT CHARSET=utf8; + + +-- 4.1.7 rightsGranted (O, R) +-- Issue 860: rights granted restriction is a repeatable field. +-- http://code.google.com/p/archivematica/issues/detail?id=860 +DROP TABLE IF EXISTS RightsStatementRightsGranted; +CREATE TABLE RightsStatementRightsGranted ( + pk INT UNSIGNED PRIMARY KEY AUTO_INCREMENT, + fkRightsStatement INT UNSIGNED, + Foreign Key (fkRightsStatement) references RightsStatement(pk), + act LONGTEXT NOT NULL, + startDate LONGTEXT NOT NULL, + endDate LONGTEXT, + endDateOpen BOOLEAN NOT NULL DEFAULT 0 +) DEFAULT CHARSET=utf8; + +-- 4.1.7.2 restriction (O, R) +DROP TABLE IF EXISTS RightsStatementRightsGrantedRestriction; +CREATE TABLE RightsStatementRightsGrantedRestriction ( + pk INT UNSIGNED PRIMARY KEY AUTO_INCREMENT, + fkRightsStatementRightsGranted INT UNSIGNED, + Foreign Key (fkRightsStatementRightsGranted) references RightsStatementRightsGranted(pk), + restriction LONGTEXT +) DEFAULT CHARSET=utf8; + +-- 4.1.7.5 rightsGrantedNote (O, R) +DROP TABLE IF EXISTS RightsStatementRightsGrantedNote; +CREATE TABLE RightsStatementRightsGrantedNote ( + pk INT UNSIGNED PRIMARY KEY AUTO_INCREMENT, + fkRightsStatementRightsGranted INT UNSIGNED, + Foreign Key (fkRightsStatementRightsGranted) references RightsStatementRightsGranted(pk), + rightsGrantedNote LONGTEXT +) 
DEFAULT CHARSET=utf8; + +-- 4.1.9 linkingAgentIdentifier (O, R) +DROP TABLE IF EXISTS RightsStatementLinkingAgentIdentifier; +CREATE TABLE RightsStatementLinkingAgentIdentifier ( + pk INT UNSIGNED PRIMARY KEY AUTO_INCREMENT, + fkRightsStatement INT UNSIGNED, + Foreign Key (fkRightsStatement) references RightsStatement(pk), + linkingAgentIdentifierType LONGTEXT NOT NULL, + linkingAgentIdentifierValue LONGTEXT NOT NULL +) DEFAULT CHARSET=utf8; + +/* +Entity semantic units +4.1 +rightsStatement (O, R) +4.1.1 rightsStatementIdentifier (M, NR) +4.1.1.1 rightsStatementIdentifierType (M, NR) +4.1.1.2 rightsStatementIdentifierValue (M, NR) +4.1.2 rightsBasis (M, NR) +4.1.3 copyrightInformation (O, NR) +4.1.3.1 copyrightStatus (M, NR) +4.1.3.2 copyrightJurisdiction (M, NR) +4.1.3.3 copyrightStatusDeterminationDate (O, NR) +4.1.3.4 copyrightNote (O, R) +4.1.4 licenseInformation (O, NR) +4.1.4.1 licenseIdentifier (O, NR) +4.1.4.1.1 licenseIdentifierType (M, NR) +4.1.4.1.2 licenseIdentifierValue (M, NR) +4.1.4.2 licenseTerms (O, NR) +4.1.4.3 licenseNote (O, R) +4.1.5 statuteInformation (O, R) +4.1.5.1 statuteJurisdiction (M, NR) +Data Dictionary for Preservation Metadata: PREMIS version 2.1 +165 +THE PREMIS DATA DICTIONARY +4.2 +166 +4.1.5.2 statuteCitation (M, NR) +4.1.5.3 statuteInformationDeterminationDate (O, NR) +4.1.5.4 statuteNote (O, R) +4.1.6 rightsGranted (O, R) +4.1.6.1 act (M, NR) +4.1.6.2 restriction (O, R) +4.1.6.3 termOfGrant (M, NR) +4.1.6.3.1 startDate (M, NR) +4.1.6.3.2 endDate (O, NR) +4.1.6.4 rightsGrantedNote (O, R) +4.1.7 linkingObjectIdentifier (O, R) +4.1.7.1 linkingObjectIdentifierType (M, NR) +4.1.7.2 linkingObjectIdentifierValue (M, NR) +4.1.7.3 linkingObjectRole (O, R) +4.1.8 linkingAgentIdentifier (O, R) +4.1.8.1 linkingAgentIdentifierType (M, NR) +4.1.8.2 linkingAgentIdentifierValue (M, NR) +4.1.8.3 linkingAgentRole (O, R) +rightsExtension (O, R) + + + + +<premis> + <rights> + <rightsStatement> + <rightsStatementIdentifier> + 
<rightsStatementIdentiferType>UUID</rightsStatementIdentiferType> + <rightsStatementIdentifierValue>14cbad80-70nd-4f46-887f-k1gv7f9f30h6</rightsStatementIdentifierValue> + </rightsStatementIdentifier> + <rightsBasis>License</rightsBasis> + <copyrightInformation> + <copyrightStatus></copyrightStatus> + <copyrightJurisdiction></copyrightJurisdiction> + <copyrightStatusDeterminationDate></copyrightStatusDeterminationDate> + <copyrightNote></copyrightNote> + </copyrightInformation> + <licenseInformation> + <licenseIdentifier> + <licenseIdentifierType>UUID</licenseIdentifierType> + <licenseIdentifierValue>d3e828fb-e6f1-40b6-a3c5-839773b35755</licenseIdentifierValue> + </licenseIdentifier> + <licenseTerms>This file is licensed under the Creative Commons Attribution-Share Alike 3.0 Unported license</licenseTerms> + <licenseNote></licenseNote> + </licenseInformation> + <statuteInformation> + <statuteJurisdiction></statuteJurisdiction> + <statuteCitation></statuteCitation> + <statuteInformationDeterminationDate></statuteInformationDeterminationDate> + <statuteNote></statuteNote> + </statuteInformation> + <rightsGranted> + <act>Disseminate</act> + <restriction>Allow</restriction> + <termOfGrant> + <startDate>2011-09-16</startDate> + <endDate>open</endDate> + </termOfGrant> + <rightsGrantedNote>Attribution required</rightsGrantedNote> + </rightsGranted> + <linkingObjectIdentifier> + <linkingObjectIdentifierType>UUID</linkingObjectIdentifierType> + <linkingObjectIdentifierValue>52cbad80-70fd-4f46-887f-a1be7f9f30e0</linkingObjectIdentifierValue> + </linkingObjectIdentifier> + <linkingAgentIdentifier> *Repeatable + <linkingAgentIdentifierType></linkingAgentIdentifierType> + <linkingAgentIdentifierValue></linkingAgentIdentifierValue> + <linkingAgentRole></linkingAgentRole> + </linkingAgentIdentifier> + </rightsStatement> + </rights> +</premis> +*/ + + + +-- clear && sudo mysql --execute="source ./mysql" MCP -- +DROP TABLE IF EXISTS Transfers; +CREATE TABLE Transfers ( + 
transferUUID VARCHAR(50) PRIMARY KEY, + currentLocation LONGTEXT, + magicLink BIGINT, + magicLinkExitMessage VARCHAR(50) DEFAULT 'Completed successfully', + type VARCHAR(50), + accessionID LONGTEXT, + sourceOfAcquisition LONGTEXT, + typeOfTransfer LONGTEXT, + description LONGTEXT, + notes LONGTEXT, + hidden BOOLEAN NOT NULL DEFAULT 0 +) DEFAULT CHARSET=utf8; + +DROP TABLE IF EXISTS Tasks; +CREATE TABLE Tasks ( + taskUUID VARCHAR(50) PRIMARY KEY, + jobUUID VARCHAR(50), + createdTime TIMESTAMP DEFAULT NOW(), + fileUUID VARCHAR(50), + fileName VARCHAR(100), + exec VARCHAR(50), + arguments VARCHAR(1000), + startTime TIMESTAMP, + client VARCHAR(50), + endTime TIMESTAMP, + stdOut LONGTEXT, + stdError LONGTEXT, + exitCode BIGINT +) DEFAULT CHARSET=utf8; + + + +DROP TABLE IF EXISTS AIPs; +CREATE TABLE AIPs ( + sipUUID VARCHAR(50) PRIMARY KEY, + createdTime TIMESTAMP DEFAULT NOW(), + sipName LONGTEXT, + sipDate TIMESTAMP, + filePath LONGTEXT +) DEFAULT CHARSET=utf8; + + +DROP TABLE IF EXISTS SIPs; +CREATE TABLE SIPs ( + sipUUID VARCHAR(50) PRIMARY KEY, + createdTime TIMESTAMP DEFAULT NOW(), + currentPath LONGTEXT, + magicLink BIGINT, + magicLinkExitMessage VARCHAR(50) DEFAULT 'Completed successfully', + hidden BOOLEAN NOT NULL DEFAULT 0 +) DEFAULT CHARSET=utf8; + + +DROP TABLE IF EXISTS Files; +CREATE TABLE Files( + fileUUID VARCHAR(50) PRIMARY KEY, + originalLocation LONGTEXT, + currentLocation LONGTEXT, + INDEX USING BTREE (currentLocation(996)), + sipUUID VARCHAR(50), + Foreign Key (sipUUID) references SIPs(sipUUID), + transferUUID VARCHAR(50), + Foreign Key (transferUUID) references Transfers(transferUUID), + removedTime TIMESTAMP DEFAULT 0, + enteredSystem TIMESTAMP DEFAULT NOW(), + fileSize BIGINT UNSIGNED, + checksum VARCHAR(100), + fileGrpUse VARCHAR(50) DEFAULT 'Original', + fileGrpUUID VARCHAR(50), + label LONGTEXT +) DEFAULT CHARSET=utf8; + + +DROP TABLE IF EXISTS FilesFits; +CREATE TABLE FilesFits( + fileUUID VARCHAR(50) PRIMARY KEY, + Foreign Key (fileUUID) 
references Files(fileUUID), + FITSxml LONGTEXT +) DEFAULT CHARSET=utf8; + +DROP TABLE IF EXISTS FilesIDs; +CREATE TABLE FilesIDs( + pk INT PRIMARY KEY AUTO_INCREMENT, + fileUUID VARCHAR(50), + Foreign Key (fileUUID) references Files(fileUUID), + formatName LONGTEXT, + formatVersion LONGTEXT, + formatRegistryName LONGTEXT, + formatRegistryKey LONGTEXT +) DEFAULT CHARSET=utf8; + +DROP TABLE IF EXISTS Events; +CREATE TABLE Events( + pk BIGINT UNSIGNED PRIMARY KEY AUTO_INCREMENT, + fileUUID VARCHAR(50), + Foreign Key (fileUUID) references Files(fileUUID), + eventIdentifierUUID VARCHAR(50), + eventType LONGTEXT, + eventDateTime TIMESTAMP, + eventDetail LONGTEXT, + eventOutcome LONGTEXT, + eventOutcomeDetailNote LONGTEXT, + linkingAgentIdentifier INT +) DEFAULT CHARSET=utf8; + +DROP TABLE IF EXISTS Derivations; +CREATE TABLE Derivations ( + pk BIGINT PRIMARY KEY AUTO_INCREMENT, + sourceFileUUID VARCHAR(50), + Foreign Key (sourceFileUUID) references Files(fileUUID), + derivedFileUUID VARCHAR(50), + Foreign Key (derivedFileUUID) references Files(fileUUID), + relatedEventUUID VARCHAR(50) +) DEFAULT CHARSET=utf8; + +DROP TABLE IF EXISTS Notifications; +CREATE TABLE Notifications ( + pk INT UNSIGNED PRIMARY KEY AUTO_INCREMENT, + message LONGTEXT, + created INT +) DEFAULT CHARSET=utf8; + +DROP TABLE IF EXISTS Sounds; +CREATE TABLE Sounds( + pk INT UNSIGNED PRIMARY KEY AUTO_INCREMENT, + description LONGTEXT, + fileLocation LONGTEXT +) DEFAULT CHARSET=utf8; + +SET @errorSound = 1; +SET @alertSound = 2; +SET @requiresApprovalSound = 3; + +INSERT INTO Sounds + (pk, description, fileLocation) + VALUES ( @errorSound, 'Error', '/usr/share/sounds/KDE-Im-Error-On-Connection.ogg'), + ( @alertSound, 'Alert', '/usr/share/sounds/KDE-Im-Irc-Event.ogg'), + ( @requiresApprovalSound, 'Requires approval', '/usr/share/sounds/KDE-Sys-List-End.ogg'); + + +DROP TABLE IF EXISTS TaskTypes; +CREATE TABLE TaskTypes ( + pk INT UNSIGNED PRIMARY KEY, + description LONGTEXT +) DEFAULT CHARSET=utf8; + 
+INSERT INTO TaskTypes + (pk, description) + VALUES ( 0, 'one instance'), + ( 1, 'for each file'), + ( 2, 'get user choice to proceed with'), + ( 3, 'assign magic link'), + ( 4, 'goto magic link'), + ( 5, 'get replacement dic from user choice'), + ( 6, 'Split creating Jobs for each file'), + ( 7, 'Split Job into many links based on file ID'), + ( 8, 'Transcoder task type'), + ( 9, 'Get microservice generated list in stdOut'), + ( 10, 'Get user choice from microservice generated list'); + +DROP TABLE IF EXISTS TasksConfigs; +CREATE TABLE TasksConfigs ( + pk INT UNSIGNED PRIMARY KEY AUTO_INCREMENT, + taskType INT UNSIGNED, + Foreign Key (taskType) references TaskTypes(pk), + taskTypePKReference INT UNSIGNED, + description LONGTEXT +) DEFAULT CHARSET=utf8; + + + +DROP TABLE IF EXISTS MicroServiceChainLinks; +CREATE TABLE MicroServiceChainLinks ( + pk INT UNSIGNED PRIMARY KEY AUTO_INCREMENT, + currentTask INT UNSIGNED, + Foreign Key (currentTask) references TasksConfigs(pk), + defaultNextChainLink INT UNSIGNED DEFAULT 1, + Foreign Key (defaultNextChainLink) references MicroServiceChainLinks(pk), + defaultPlaySound INT UNSIGNED, + Foreign Key (defaultPlaySound) references Sounds(pk), + microserviceGroup VARCHAR(50), + reloadFileList BOOLEAN DEFAULT TRUE, + defaultExitMessage VARCHAR(50) DEFAULT 'Failed' +) DEFAULT CHARSET=utf8; + + +DROP TABLE IF EXISTS MicroServiceChainLinksExitCodes; +CREATE TABLE MicroServiceChainLinksExitCodes ( + pk INT UNSIGNED PRIMARY KEY AUTO_INCREMENT, + microServiceChainLink INT UNSIGNED, + Foreign Key (microServiceChainLink) references MicroServiceChainLinks(pk), + exitCode INT DEFAULT 0, + nextMicroServiceChainLink INT UNSIGNED, + Foreign Key (nextMicroServiceChainLink) references MicroServiceChainLinks(pk), + playSound INT UNSIGNED, + Foreign Key (playSound) references Sounds(pk), + exitMessage VARCHAR(50) DEFAULT 'Completed successfully' +) DEFAULT CHARSET=utf8; + +DROP TABLE IF EXISTS Jobs; +CREATE TABLE Jobs ( + jobUUID VARCHAR(50) 
PRIMARY KEY, + jobType VARCHAR(250), + createdTime TIMESTAMP DEFAULT NOW(), + createdTimeDec decimal(24,10) NOT NULL DEFAULT 0, + directory VARCHAR(250), + SIPUUID VARCHAR(50), + unitType VARCHAR(50), + currentStep VARCHAR(50), + microserviceGroup VARCHAR(50), + hidden BOOLEAN NOT NULL DEFAULT 0, + MicroServiceChainLinksPK INT UNSIGNED, + Foreign Key (MicroServiceChainLinksPK) references MicroServiceChainLinks(pk), + subJobOf VARCHAR(50) +) DEFAULT CHARSET=utf8; + +DROP TABLE IF EXISTS MicroServiceChains; +CREATE TABLE MicroServiceChains ( + pk INT UNSIGNED PRIMARY KEY AUTO_INCREMENT, + startingLink INT UNSIGNED, + Foreign Key (startingLink) references MicroServiceChainLinks(pk), + description LONGTEXT +) DEFAULT CHARSET=utf8; + + +DROP TABLE IF EXISTS MicroServiceChainChoice; +CREATE TABLE MicroServiceChainChoice ( + pk INT UNSIGNED PRIMARY KEY AUTO_INCREMENT, + choiceAvailableAtLink INT UNSIGNED, + Foreign Key (choiceAvailableAtLink) references MicroServiceChainLinks(pk), + chainAvailable INT UNSIGNED, + Foreign Key (chainAvailable) references MicroServiceChains(pk) +) DEFAULT CHARSET=utf8; + +DROP TABLE IF EXISTS MicroServiceChoiceReplacementDic; +CREATE TABLE MicroServiceChoiceReplacementDic ( + pk INT UNSIGNED PRIMARY KEY AUTO_INCREMENT, + choiceAvailableAtLink INT UNSIGNED, + Foreign Key (choiceAvailableAtLink) references MicroServiceChainLinks(pk), + description LONGTEXT, + replacementDic LONGTEXT +) DEFAULT CHARSET=utf8; + +DROP TABLE IF EXISTS WatchedDirectoriesExpectedTypes; +CREATE TABLE WatchedDirectoriesExpectedTypes ( + pk INT UNSIGNED PRIMARY KEY AUTO_INCREMENT, + description LONGTEXT +) DEFAULT CHARSET=utf8; + +SET @expectedTypeSIP = 1; +SET @expectedTypeDIP = 2; +SET @expectedTypeTransfer = 3; +INSERT INTO WatchedDirectoriesExpectedTypes(pk, description) + VALUES ( @expectedTypeSIP, 'SIP'), + ( @expectedTypeDIP, 'DIP'), + ( @expectedTypeTransfer, 'Transfer'); + +DROP TABLE IF EXISTS WatchedDirectories; +CREATE TABLE WatchedDirectories ( + pk INT 
UNSIGNED PRIMARY KEY AUTO_INCREMENT, + watchedDirectoryPath LONGTEXT, + chain INT UNSIGNED, + Foreign Key (chain) references MicroServiceChains(pk), + onlyActOnDirectories BOOLEAN DEFAULT TRUE, + expectedType INT UNSIGNED DEFAULT 1, + Foreign Key (expectedType) references WatchedDirectoriesExpectedTypes(pk) +) DEFAULT CHARSET=utf8; + + +DROP TABLE IF EXISTS StandardTasksConfigs; +CREATE TABLE StandardTasksConfigs ( + pk INT UNSIGNED PRIMARY KEY AUTO_INCREMENT, + filterFileEnd VARCHAR(50), + filterFileStart VARCHAR(50), + filterSubDir VARCHAR(50), + requiresOutputLock BOOLEAN, + standardOutputFile VARCHAR(250), + standardErrorFile VARCHAR(250), + execute VARCHAR(50), + arguments LONGTEXT +) DEFAULT CHARSET=utf8; + + +DROP TABLE IF EXISTS SourceDirectories; +CREATE TABLE SourceDirectories ( + pk int(10) unsigned NOT NULL AUTO_INCREMENT, + path longtext, + PRIMARY KEY (pk) +) DEFAULT CHARSET=utf8; + +DROP TABLE IF EXISTS ElasticsearchIndexBackup; +CREATE TABLE ElasticsearchIndexBackup ( + pk INT UNSIGNED PRIMARY KEY AUTO_INCREMENT, + docId VARCHAR(255), + indexName VARCHAR(255), + typeName VARCHAR(255), + data LONGTEXT NOT NULL +) DEFAULT CHARSET=utf8; + +SET @microserviceGroup = 'Process submission documentation'; +-- +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, 'objects', FALSE, '%SIPLogsDirectory%normalizationLog.txt', '%SIPLogsDirectory%normalizationLog.txt', 'preservation', '{"%normalizeFileGrpUse%":"submissionDocumentation", "%excludeDirectory%":"%SIPObjectsDirectory%submissionDocumentation/" }'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (7, LAST_INSERT_ID(), 'Find preservation links to run.'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = 
LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, NULL); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'Normalize submission documentation to preservation'); +SET @NormalizePreservationChainForSubmissionDocumentation = LAST_INSERT_ID(); + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, 'objects', FALSE, '%SIPLogsDirectory%normalizationLog.txt', '%SIPLogsDirectory%normalizationLog.txt', 'thumbnail', '{"%normalizeFileGrpUse%":"original", "%excludeDirectory%":"%SIPObjectsDirectory%submissionDocumentation/" }'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (7, LAST_INSERT_ID(), 'Find thumbnail links to run.'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, NULL); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'Normalize submission documentation to thumbnails'); +SET @NormalizeThumbnailChainForSubmissionDocumentation = LAST_INSERT_ID(); + +-- +SET @microserviceGroup = 'Normalize'; +-- +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, 'objects', FALSE, '%SIPLogsDirectory%normalizationLog.txt', '%SIPLogsDirectory%normalizationLog.txt', 'thumbnail', 
'{"%normalizeFileGrpUse%":"original", "%excludeDirectory%":"%SIPObjectsDirectory%submissionDocumentation/" }'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (7, LAST_INSERT_ID(), 'Find thumbnail links to run.'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, NULL); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'Normalize for preservation'); +SET @NormalizePreservationChain = LAST_INSERT_ID(); + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, 'objects', FALSE, '%SIPLogsDirectory%normalizationLog.txt', '%SIPLogsDirectory%normalizationLog.txt', 'access', '{"%normalizeFileGrpUse%":"original", "%excludeDirectory%":"%SIPObjectsDirectory%submissionDocumentation/" }'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (7, LAST_INSERT_ID(), 'Find access links to run.'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'Normalize for access'); +SET @NormalizeAccessChain = 
LAST_INSERT_ID(); +-- + + +-- +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, 'objects', FALSE, '%SIPLogsDirectory%normalizationLog.txt', '%SIPLogsDirectory%normalizationLog.txt', 'thumbnail', '{"%normalizeFileGrpUse%":"service", "%excludeDirectory%":"%SIPObjectsDirectory%submissionDocumentation/" }'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (7, LAST_INSERT_ID(), 'Find thumbnail links to run.'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, NULL); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'Normalize for preservation'); +SET @NormalizePreservationChain = LAST_INSERT_ID(); + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, 'objects', FALSE, '%SIPLogsDirectory%normalizationLog.txt', '%SIPLogsDirectory%normalizationLog.txt', 'access', '{"%normalizeFileGrpUse%":"service", "%excludeDirectory%":"%SIPObjectsDirectory%submissionDocumentation/" }'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (7, LAST_INSERT_ID(), 'Find access links to run.'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, 
exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'Normalize for access'); +SET @NormalizeAccessFromServiceChain = LAST_INSERT_ID(); +-- + + +-- +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, 'objects', FALSE, '%SIPLogsDirectory%normalizationLog.txt', '%SIPLogsDirectory%normalizationLog.txt', 'thumbnail', '{"%normalizeFileGrpUse%":"service", "%excludeDirectory%":"%SIPObjectsDirectory%submissionDocumentation/" }'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (7, LAST_INSERT_ID(), 'Find thumbnail links to run.'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, NULL); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'Normalize thumbnails'); +SET @NormalizeThumbnailsChain = LAST_INSERT_ID(); +-- + +-- +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, 'objects', FALSE, '%SIPLogsDirectory%normalizationLog.txt', '%SIPLogsDirectory%normalizationLog.txt', 'preservation', '{"%normalizeFileGrpUse%":"original", "%excludeDirectory%":"%SIPObjectsDirectory%submissionDocumentation/" }'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (7, 
LAST_INSERT_ID(), 'Find preservation links to run.'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, NULL); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, 'objects', FALSE, '%SIPLogsDirectory%normalizationLog.txt', '%SIPLogsDirectory%normalizationLog.txt', 'thumbnail', '{"%normalizeFileGrpUse%":"original", "%excludeDirectory%":"%SIPObjectsDirectory%submissionDocumentation/" }'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (7, LAST_INSERT_ID(), 'Find thumbnail links to run.'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'Normalize for preservation'); +SET @NormalizePreservationChain = LAST_INSERT_ID(); + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, 'objects', FALSE, '%SIPLogsDirectory%normalizationLog.txt', '%SIPLogsDirectory%normalizationLog.txt', 'access', '{"%normalizeFileGrpUse%":"original", 
"%excludeDirectory%":"%SIPObjectsDirectory%submissionDocumentation/" }'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (7, LAST_INSERT_ID(), 'Find access links to run.'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'Normalize for preservation and +access'); +SET @NormalizePreservationAndAccessChain = LAST_INSERT_ID(); +-- + +-- Move To Failed Directory -- +SET @microserviceGroup = 'Failed SIP'; +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'moveSIP_v0.0', '"%SIPDirectory%" "%sharedPath%failed/." 
"%SIPUUID%" "%sharedPath%" "%SIPUUID%" "%sharedPath%"'); + +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, LAST_INSERT_ID(), 'Move to the failed directory'); + +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink, defaultPlaySound) + VALUES (@microserviceGroup, LAST_INSERT_ID(), NULL, @errorSound); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, NULL); +SET @defaultNextChainLink = @MicroServiceChainLink; + +-- Move to processing directory -- +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'moveSIP_v0.0', '"%SIPDirectory%" "%processingDirectory%." "%SIPUUID%" "%sharedPath%"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, LAST_INSERT_ID(), 'Move to processing directory'); +SET @moveToProcessingDirectoryTaskConfig = LAST_INSERT_ID(); + + +-- Set file permissions -- +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'setDirectoryPermissionsForAppraisal_v0.0', '"%SIPDirectory%"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, LAST_INSERT_ID(), 'Set file permissions'); +SET @setFilePermissionsTaskConfig = LAST_INSERT_ID(); + + +-- Reject SIP -- +SET @microserviceGroup = 'Reject SIP'; +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'moveSIP_v0.0', '"%SIPDirectory%" "%rejectedDirectory%." 
"%SIPUUID%" "%sharedPath%"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, LAST_INSERT_ID(), 'Move to the rejected directory'); + +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), NULL); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, NULL); +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'Reject SIP'); +SET @rejectSIPMicroServiceChain = LAST_INSERT_ID(); +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'Reject'); +SET @rejectSIPMicroServiceChain2 = LAST_INSERT_ID(); +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'Reject AIP'); +SET @rejectAIPMicroServiceChain = LAST_INSERT_ID(); + + +-- Reject DIP -- +SET @microserviceGroup = 'Reject DIP'; +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'move_v0.0', '"%SIPDirectory%" "%rejectedDirectory%."'); + +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, LAST_INSERT_ID(), 'Move to the rejected directory'); + +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), NULL); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, NULL); +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'Reject DIP'); +SET @rejectDIPMicroServiceChain = LAST_INSERT_ID(); +-- <uploadDIP ICA-ATOM> -- +SET 
@microserviceGroup = 'Upload DIP'; +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'move_v0.0', '"%SIPDirectory%" "%watchDirectoryPath%uploadedDIPs/."'); + +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, LAST_INSERT_ID(), 'Move to the uploadedDIPs directory'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), NULL); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, NULL); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'upload-qubit_v0.0', '--url="http://localhost/ica-atom/index.php" \\\r\n--email="demo@example.com" \\\r\n--password="demo" \\\r\n--uuid="%SIPUUID%" \\\r\n--rsync-target="/tmp"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, LAST_INSERT_ID(), 'Upload DIP'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +-- disabled, because it uses the SIP uuid and updates the SIP/AIP directory location with that of the DIP -- +/* +INSERT INTO MicroServiceChainLinks (microserviceGroup, 
currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, @moveToProcessingDirectoryTaskConfig, @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; +*/ + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'Upload DIP to Atom'); +SET @MicroServiceChainUploadICAATOM = LAST_INSERT_ID(); + +-- <uploadDIP - contentDM> -- +SET @microserviceGroup = 'Upload DIP'; +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'move_v0.0', '"%SIPDirectory%" "%watchDirectoryPath%uploadedDIPs/."'); + +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, LAST_INSERT_ID(), 'Move to the uploadedDIPs directory'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), NULL); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, NULL); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +-- 7: 'Upload DIP to contentDM' +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'upload-contentDM_v0.0', '--uuid="%SIPName%-%SIPUUID%" --collection "%ContentdmCollection%" --server "%ContentdmServer%" --username "%ContentdmUser%" --group "%ContentdmGroup%" --outputDir "%watchDirectoryPath%uploadedDIPs"'); +INSERT INTO TasksConfigs (taskType, 
taskTypePKReference, description) + VALUES + (0, LAST_INSERT_ID(), 'Upload DIP to contentDM'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, NULL); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +-- 6: 'Restructure DIP for CONTENTdm upload' +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'restructureDIPForContentDMUpload_v0.0', '--uuid="%SIPName%-%SIPUUID%" --dipDir "%SIPDirectory%" --collection "%ContentdmCollection%" --server "%ContentdmServer%" --ingestFormat "%ContentdmIngestFormat%" --outputDir "%watchDirectoryPath%uploadedDIPs"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, LAST_INSERT_ID(), 'Restructure DIP for CONTENTdm upload'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +-- 5: 'Select upload type (Project Client or direct upload)' +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, '', ''); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (5, 
LAST_INSERT_ID(), 'Select upload type (Project Client or direct upload)'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), NULL); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChoiceReplacementDic (choiceAvailableAtLink, description, replacementDic) + VALUES (@MicroServiceChainLink, 'Project client', '{"%ContentdmIngestFormat%":"projectclient"}'); +INSERT INTO MicroServiceChoiceReplacementDic (choiceAvailableAtLink, description, replacementDic) + VALUES (@MicroServiceChainLink, 'Direct upload', '{"%ContentdmIngestFormat%":"directupload"}'); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +-- 4: 'Select target CONTENTdm collection' +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, '%ContentdmCollection%', NULL); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (10, LAST_INSERT_ID(), 'Select destination collection'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 
'getContentdmCollectionList_v0.0', '"%ContentdmServer%"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (9, LAST_INSERT_ID(), 'Get list of collections on server'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +-- 3: 'Select target CONTENTdm server' +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'select-contentDM-server_v0.0', ''); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (5, LAST_INSERT_ID(), 'Select target CONTENTdm server'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), NULL); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChoiceReplacementDic (choiceAvailableAtLink, description, replacementDic) + VALUES (@MicroServiceChainLink, 'localhost', '{"%ContentdmServer%":"localhost", "%ContentdmUser%":"usernamefoo", "%ContentdmGroup%":"123"}'); +INSERT INTO MicroServiceChoiceReplacementDic (choiceAvailableAtLink, description, replacementDic) + VALUES (@MicroServiceChainLink, 'contentdm.example.com', '{"%ContentdmServer%":"111.222.333.444:81", "%ContentdmUser%":"usernamebar", "%ContentdmGroup%":"456"}'); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = 
@MicroServiceChainLink; + +-- disabled, because it uses the SIP uuid and updates the SIP/AIP directory location with that of the DIP -- +/* +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, @moveToProcessingDirectoryTaskConfig, @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; +*/ + +-- 2: 'Upload DIP to CONTENTdm' +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'Upload DIP to CONTENTdm'); +SET @MicroServiceChainUploadContentDM = LAST_INSERT_ID(); + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, '', ''); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (2, LAST_INSERT_ID(), 'Upload DIP'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), NULL); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainChoice (choiceAvailableAtLink, chainAvailable) + VALUES + (@MicroServiceChainLink, @MicroServiceChainUploadICAATOM); +INSERT INTO MicroServiceChainChoice (choiceAvailableAtLink, chainAvailable) + VALUES + (@MicroServiceChainLink, @MicroServiceChainUploadContentDM); +INSERT INTO MicroServiceChainChoice (choiceAvailableAtLink, chainAvailable) + VALUES + (@MicroServiceChainLink, @rejectDIPMicroServiceChain); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +-- 1 +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'uploadDIP'); +SET @MicroServiceChain = 
LAST_INSERT_ID(); + +INSERT INTO WatchedDirectories (watchedDirectoryPath, chain, expectedType) + VALUES ('%watchDirectoryPath%uploadDIP/', @MicroServiceChain, @expectedTypeDIP); + +-- </uploadDIP> -- + +-- Store AIP -- +SET @microserviceGroup = 'Store AIP'; +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'remove_v0.0', '-R "%SIPDirectory%"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, LAST_INSERT_ID(), 'Remove the processing directory'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), NULL); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, NULL); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'storeAIP_v0.0', '"%AIPsStore%" "%SIPDirectory%%SIPName%-%SIPUUID%.7z" "%SIPUUID%" "%sharedPath%www/index.html" "%SIPName%" "%date%"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, LAST_INSERT_ID(), 'Store the AIP'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +-- -- +INSERT INTO 
MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, @moveToProcessingDirectoryTaskConfig, @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, '', ''); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (5, LAST_INSERT_ID(), 'Store AIP location'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), NULL); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChoiceReplacementDic (choiceAvailableAtLink, description, replacementDic) + VALUES (@MicroServiceChainLink, 'Store AIP in standard Archivematica Directory', '{"%AIPsStore%":"%sharedPath%www/AIPsStore/"}'); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'Store AIP'); +SET @MicroServiceChain = LAST_INSERT_ID(); + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, '', ''); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (2, LAST_INSERT_ID(), 'Store AIP'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES 
(@microserviceGroup, LAST_INSERT_ID(), NULL); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainChoice (choiceAvailableAtLink, chainAvailable) + VALUES + (@MicroServiceChainLink, @MicroServiceChain); +INSERT INTO MicroServiceChainChoice (choiceAvailableAtLink, chainAvailable) + VALUES + (@MicroServiceChainLink, @rejectAIPMicroServiceChain); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'storeAIP'); +SET @MicroServiceChain = LAST_INSERT_ID(); + +INSERT INTO WatchedDirectories (watchedDirectoryPath, chain) + VALUES ('%watchDirectoryPath%storeAIP/', @MicroServiceChain); + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'moveSIP_v0.0', '"%SIPDirectory%" "%sharedPath%watchedDirectories/storeAIP/." "%SIPUUID%" "%sharedPath%" "%SIPUUID%" "%sharedPath%"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, LAST_INSERT_ID(), 'Move to the store AIP approval directory'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), NULL); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, NULL); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; +-- -- +SET @microserviceGroup = 'Prepare AIP'; + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'remove_v0.0', '-R "%SIPDirectory%%SIPName%-%SIPUUID%" "%SIPDirectory%METS.%SIPUUID%.xml" "%SIPDirectory%metadata/" 
"%SIPLogsDirectory%" "%SIPObjectsDirectory%" "%SIPDirectory%thumbnails/"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, LAST_INSERT_ID(), 'Remove bagged files'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'setFilePermission_v0.0', '775 "%SIPDirectory%%SIPName%-%SIPUUID%.7z"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, LAST_INSERT_ID(), 'Set bag file permissions'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +-- Also modify extraction command in store AIP +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, '7z_v0.0', 'a -bd -t7z -y -m0=%AIPCompressionAlgorithm% -mx=%AIPCompressionLevel% "%SIPDirectory%%SIPName%-%SIPUUID%.7z" "%SIPDirectory%%SIPName%-%SIPUUID%"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + 
VALUES + (0, LAST_INSERT_ID(), 'Compress AIP'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; +SET @PrepareAIPMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, '', ''); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (5, LAST_INSERT_ID(), 'Select compression level'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), NULL); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChoiceReplacementDic (choiceAvailableAtLink, description, replacementDic) + VALUES (@MicroServiceChainLink, '0 - copy mode', '{"%AIPCompressionLevel%":"0"}'); +INSERT INTO MicroServiceChoiceReplacementDic (choiceAvailableAtLink, description, replacementDic) + VALUES (@MicroServiceChainLink, '1 - fastest mode', '{"%AIPCompressionLevel%":"1"}'); +INSERT INTO MicroServiceChoiceReplacementDic (choiceAvailableAtLink, description, replacementDic) + VALUES (@MicroServiceChainLink, '3 - fast compression mode', '{"%AIPCompressionLevel%":"3"}'); +INSERT INTO MicroServiceChoiceReplacementDic (choiceAvailableAtLink, description, replacementDic) + VALUES (@MicroServiceChainLink, '5 - normal compression mode', '{"%AIPCompressionLevel%":"5"}'); +INSERT INTO MicroServiceChoiceReplacementDic (choiceAvailableAtLink, description, replacementDic) + VALUES (@MicroServiceChainLink, 
'7 - maximum compression', '{"%AIPCompressionLevel%":"7"}'); +INSERT INTO MicroServiceChoiceReplacementDic (choiceAvailableAtLink, description, replacementDic) + VALUES (@MicroServiceChainLink, '9 - ultra compression', '{"%AIPCompressionLevel%":"9"}'); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, '', ''); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (5, LAST_INSERT_ID(), 'Select compression algorithm'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), NULL); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChoiceReplacementDic (choiceAvailableAtLink, description, replacementDic) + VALUES (@MicroServiceChainLink, 'lzma', '{"%AIPCompressionAlgorithm%":"lzma"}'); +INSERT INTO MicroServiceChoiceReplacementDic (choiceAvailableAtLink, description, replacementDic) + VALUES (@MicroServiceChainLink, 'bzip', '{"%AIPCompressionAlgorithm%":"bzip"}'); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'bagit_v0.0', 'create "%SIPDirectory%%SIPName%-%SIPUUID%" "%SIPLogsDirectory%" "%SIPObjectsDirectory%" "%SIPDirectory%METS.%SIPUUID%.xml" 
"%SIPDirectory%metadata/" "%SIPDirectory%thumbnails/" --writer filesystem --payloadmanifestalgorithm "sha512"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, LAST_INSERT_ID(), 'Prepare AIP'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'elasticSearchAIPIndex_v0.0', '"%SIPDirectory%" "%SIPUUID%"'); +SET @elasticSearchIndexTaskTask = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @elasticSearchIndexTaskTask, 'Index AIP contents'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; +SET @PrepareAIPMicroServiceChainLink = @MicroServiceChainLink; + +-- GENERATE AIP AND DIP -- +SET @microserviceGroup = 'Prepare DIP'; +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'move_v0.0', '"%SIPDirectory%DIP" 
"%sharedPath%watchedDirectories/uploadDIP/%SIPDirectoryBasename%"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, LAST_INSERT_ID(), 'Generate DIP'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, @setFilePermissionsTaskConfig, @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'copy_v0.0', '"%SIPDirectory%METS.%SIPUUID%.xml" "%SIPDirectory%DIP/METS.%SIPUUID%.xml"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, LAST_INSERT_ID(), 'Copy METS to DIP directory'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO 
StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'copy_v0.0', '-R "%SIPDirectory%thumbnails" "%SIPDirectory%DIP/."'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, LAST_INSERT_ID(), 'Copy thumbnails to DIP directory'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +SET @microserviceGroup = 'Prepare AIP'; + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'createMETS_v2.0', '--amdSec --baseDirectoryPath "%SIPDirectory%" --baseDirectoryPathString "SIPDirectory" --fileGroupIdentifier "%SIPUUID%" --fileGroupType "sipUUID" --xmlFile "%SIPDirectory%METS.%SIPUUID%.xml"'); +SET @GenerateMETSxmldocumentTask = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @GenerateMETSxmldocumentTask, 'Generate METS.xml document'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO StandardTasksConfigs 
(filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'copyTransfersMetadataAndLogs_v0.0', '--sipDirectory "%SIPDirectory%" --sipUUID "%SIPUUID%" --sharedPath "%sharedPath%"'); +SET @CopyTransfersmetadataandlogsTask = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @CopyTransfersmetadataandlogsTask, 'Copy transfers metadata and logs'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +-- disabling as it breaks the maildir format. 
-- +/* +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'removeEmptyDirectories_v0.0', '"%SIPObjectsDirectory%"'); +SET @RemoveEmptyDirectoriesTask = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @RemoveEmptyDirectoriesTask, 'Remove empty directories'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; +*/ + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, 'objects/', FALSE, NULL, NULL, 'verifyPREMISChecksums_v0.0', '--fileUUID "%fileUUID%" --filePath "%relativeLocation%" --date "%date%" --eventIdentifierUUID "%taskUUID%"'); +SET @VerifychecksumsgeneratedoningestTask = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (1, @VerifychecksumsgeneratedoningestTask, 'Verify checksums generated on ingest'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO StandardTasksConfigs 
(filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, 'objects/', FALSE, '%SIPLogsDirectory%removedFilesWithNoPremisMetadata.log', '%SIPLogsDirectory%removedFilesWithNoPremisMetadata.log', 'removeFilesWithoutPresmisMetadata_v0.0', '--fileUUID "%fileUUID%" --inputFile "%relativeLocation%" --sipDirectory "%SIPDirectory%"'); +SET @RemovefileswithoutlinkinginformationTask = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (1, @RemovefileswithoutlinkinginformationTask, 'Remove files without linking information (failed normalization artifacts etc.)'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +SET @microserviceGroup = 'Process submission documentation'; + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, 'objects/submissionDocumentation', FALSE, NULL, NULL, @NormalizePreservationChainForSubmissionDocumentation, Null); +SET @NormalizesubmissiondocumentationTask = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (6, @NormalizesubmissiondocumentationTask, 'Normalize submission documentation to preservation format'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO 
MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, 'objects/submissionDocumentation', FALSE, NULL, NULL, @NormalizeThumbnailChainForSubmissionDocumentation, Null); +SET @NormalizesubmissiondocumentationThumbnailsTask = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (6, @NormalizesubmissiondocumentationThumbnailsTask, 'Normalize submission documentation to preservation format'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, 'objects/submissionDocumentation', FALSE, NULL, NULL, 'identifyFilesByExtension_v0.0', '"%relativeLocation%" "%fileUUID%"'); +SET @identifyFilesByExtensionsubmissiondocumentationmetadataTask = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (1, @identifyFilesByExtensionsubmissiondocumentationmetadataTask, 'Identify files by extension'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET 
@MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, 'objects/submissionDocumentation', FALSE, NULL, NULL, 'FITS_v0.0', '"%relativeLocation%" "%SIPLogsDirectory%fileMeta/%fileUUID%.xml" "%date%" "%taskUUID%" "%fileUUID%" "%fileGrpUse%"'); +SET @CharacterizeandextractsubmissiondocumentationmetadataTask = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (1, @CharacterizeandextractsubmissiondocumentationmetadataTask, 'Characterize and extract metadata on submission documentation'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, 'objects/submissionDocumentation', TRUE, NULL, '%SIPLogsDirectory%clamAVScan.txt', 'archivematicaClamscan_v0.0', '"%fileUUID%" "%relativeLocation%" "%date%" "%taskUUID%"'); +SET @ScanforvirusesinsubmissiondocumentationTask = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (1, @ScanforvirusesinsubmissiondocumentationTask, 'Scan for viruses in submission documentation'); 
+INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, 'objects/submissionDocumentation', FALSE, '%SIPLogsDirectory%filenameCleanup.log', '%SIPLogsDirectory%filenameCleanup.log', 'sanitizeObjectNames_v0.0', '"%SIPDirectory%objects/submissionDocumentation/" "%SIPUUID%" "%date%" "%taskUUID%" "SIPDirectory" "sipUUID" "%SIPDirectory%"'); +SET @SanitizefileanddirectorynamesinsubmissiondocumentationTask = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @SanitizefileanddirectorynamesinsubmissiondocumentationTask, 'Sanitize file and directory names in submission documentation'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, 'objects/submissionDocumentation', TRUE, '%SIPLogsDirectory%extraction.log', '%SIPLogsDirectory%extraction.log', 'transcoderExtractPackages_v0.0', 
'"%relativeLocation%" "%SIPObjectsDirectory%" "%SIPLogsDirectory%" "%date%" "%taskUUID%" "%fileUUID%"'); +SET @ExtractpackagesinsubmissiondocumentationTask = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (1, @ExtractpackagesinsubmissiondocumentationTask, 'Extract packages in submission documentation'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, 'objects/submissionDocumentation', FALSE, NULL, NULL, 'updateSizeAndChecksum_v0.0', '--filePath "%relativeLocation%" --fileUUID "%fileUUID%" --eventIdentifierUUID "%taskUUID%" --date "%date%"'); +SET @AssignchecksumsandfilesizestosubmissiondocumentationTask = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (1, @AssignchecksumsandfilesizestosubmissiondocumentationTask, 'Assign checksums and file sizes to submissionDocumentation'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, 
requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, 'objects/submissionDocumentation', TRUE, '%SIPLogsDirectory%FileUUIDs.log', '%SIPLogsDirectory%FileUUIDsError.log', 'assignFileUUIDs_v0.0', '--sipUUID "%SIPUUID%" --sipDirectory "%SIPDirectory%" --filePath "%relativeLocation%" --fileUUID "%fileUUID%" --eventIdentifierUUID "%taskUUID%" --date "%date%" --use "submissionDocumentation"'); +SET @AssignfileUUIDstosubmissiondocumentationTask = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (1, @AssignfileUUIDstosubmissiondocumentationTask, 'Assign file UUIDs to submission documentation'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'move_v0.0', '"%SIPDirectory%metadata/submissionDocumentation" "%SIPDirectory%objects/submissionDocumentation"'); +SET @MoveSubmissionDocumentationintoobjectsdirectoryTask = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @MoveSubmissionDocumentationintoobjectsdirectoryTask, 'Move submission documentation into objects directory'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes 
(microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'checkForSubmissionDocumenation_v0.0', '"%SIPDirectory%metadata/submissionDocumentation"'); +SET @CheckForSubmissionDocumentationTask = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @CheckForSubmissionDocumentationTask, 'Check for submission documentation'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'copyTransferSubmissionDocumentation_v0.0', '"%SIPUUID%" "%SIPDirectory%metadata/submissionDocumentation" "%sharedPath%"'); +SET @MoveTransferSubmissionDocumentationTask = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @MoveTransferSubmissionDocumentationTask, 'Copy transfer submission documentation'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes 
(microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, @setFilePermissionsTaskConfig, @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink2 = @MicroServiceChainLink; + + +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, @moveToProcessingDirectoryTaskConfig, @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink2 = @MicroServiceChainLink; + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'Approve'); -- approve normalization preservation and access +SET @MicroServiceChain = LAST_INSERT_ID(); + + +SET @microserviceGroup = 'Normalize'; +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, 'objects', FALSE, NULL, NULL, @NormalizeThumbnailsChain, Null); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (6, LAST_INSERT_ID(), 'Normalize thumbnails'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes 
(microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'createDirectory_v0.0', '-m 770 "%SIPDirectory%thumbnails/"'); +SET @createThumbnailsDirectoryTask = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @createThumbnailsDirectoryTask, 'Create thumbnails directory'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, @setFilePermissionsTaskConfig, @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, @moveToProcessingDirectoryTaskConfig, @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET 
@NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'Do not normalize'); -- No normalization for sip with access directory -- +SET @noNormalizationWithAccessDirectoryMicroServiceChain = LAST_INSERT_ID(); + +SET @microserviceGroup = 'Normalize'; +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, '', ''); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (2, LAST_INSERT_ID(), 'Approve normalization'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), NULL); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainChoice (choiceAvailableAtLink, chainAvailable) + VALUES + (@MicroServiceChainLink, @MicroServiceChain); +INSERT INTO MicroServiceChainChoice (choiceAvailableAtLink, chainAvailable) + VALUES + (@MicroServiceChainLink, @rejectSIPMicroServiceChain2); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'approveNormalization'); +SET @MicroServiceChain = LAST_INSERT_ID(); + +INSERT INTO WatchedDirectories (watchedDirectoryPath, chain) + VALUES ('%watchDirectoryPath%approveNormalization/preservationAndAccess/', @MicroServiceChain); + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'moveSIP_v0.0', '"%SIPDirectory%" "%sharedPath%watchedDirectories/approveNormalization/preservationAndAccess/." 
"%SIPUUID%" "%sharedPath%" "%SIPUUID%" "%sharedPath%"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, LAST_INSERT_ID(), 'Move to approve normalization directory'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), NULL); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, NULL); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (1, @RemovefileswithoutlinkinginformationTask, 'Remove files without linking information (failed normalization artifacts etc.)'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, @setFilePermissionsTaskConfig, @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; +SET @FinishedNormalizationWithDIPMicroServiceChainLink = @MicroServiceChainLink; +SET @ResumeAferTranscodingPreservationAndAccessLink = @MicroServiceChainLink; + + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'manual 
normalization'); -- Manual normalization for sip with access directory -- +SET @manualNormalizationWithAccessDirectoryMicroServiceChain = LAST_INSERT_ID(); + +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (1, @RemovefileswithoutlinkinginformationTask, 'Remove files without linking information (failed normalization artifacts etc.)'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, 'objects', FALSE, NULL, NULL, @NormalizePreservationChain, Null); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (6, LAST_INSERT_ID(), 'Normalize preservation'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @ResumeAferTranscodingPreservationAndAccessLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @ResumeAferTranscodingPreservationAndAccessLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @createThumbnailsDirectoryTask, 'Create thumbnails directory'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), 
@NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +-- move to processing directory -- +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, @moveToProcessingDirectoryTaskConfig, @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink2 = @MicroServiceChainLink; + +-- Set file permissions +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, @setFilePermissionsTaskConfig, @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink2); +SET @NextMicroServiceChainLink2 = @MicroServiceChainLink; + + + + + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'Normalize for preservation'); -- preservation normalization for sip with access directory -- +SET @normalizePreservationWithAccessDirectoryMicroServiceChain = LAST_INSERT_ID(); + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, 'objects', FALSE, NULL, NULL, @NormalizePreservationAndAccessChain, Null); -- need itoa? 
-- +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (6, LAST_INSERT_ID(), 'Normalize for preservation and access'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @ResumeAferTranscodingPreservationAndAccessLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @ResumeAferTranscodingPreservationAndAccessLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'createDirectory_v0.0', '-m 770 "%SIPDirectory%thumbnails/"'); +SET @createThumbnailsDirectoryTask = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @createThumbnailsDirectoryTask, 'Create thumbnails directory'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'createDirectory_v0.0', '-m 770 "%SIPDirectory%DIP/" "%SIPDirectory%DIP/objects/"'); +SET @createDIPDirectoryTask = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, 
@createDIPDirectoryTask, 'Create DIP directory'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +-- move to processing directory -- +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, @moveToProcessingDirectoryTaskConfig, @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'Normalize for preservation and +access'); +SET @CreateDIPandAIPMicroServiceChain = LAST_INSERT_ID(); +-- /CREATE AIP AND DIP -- + +-- CREATE AIP AND DIP - Access normalization only -- +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (1, @RemovefileswithoutlinkinginformationTask, 'Remove files without linking information (failed normalization artifacts etc.)'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @FinishedNormalizationWithDIPMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO StandardTasksConfigs 
(filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, 'objects', FALSE, NULL, NULL, @NormalizeAccessChain, Null); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (6, LAST_INSERT_ID(), 'Normalize access'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @ResumeAferTranscodingPreservationAndAccessLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @ResumeAferTranscodingPreservationAndAccessLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @createThumbnailsDirectoryTask, 'Create thumbnails directory'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'createDirectory_v0.0', '-m 770 "%SIPDirectory%DIP/" "%SIPDirectory%DIP/objects/"'); +SET @createDIPDirectoryTask = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @createDIPDirectoryTask, 'Create DIP directory'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + 
VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +-- move to processing directory -- +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, @moveToProcessingDirectoryTaskConfig, @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'Normalize for access'); +SET @normalizeAccessOnlyMicroServiceChain = LAST_INSERT_ID(); +-- CREATE AIP AND DIP - Access normalization only -- + +-- CREATE AIP AND DIP - Access normalization only service-- +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (1, @RemovefileswithoutlinkinginformationTask, 'Remove files without linking information (failed normalization artifacts etc.)'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @FinishedNormalizationWithDIPMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + 
VALUES + (NULL, NULL, 'objects/service', FALSE, NULL, NULL, @NormalizeAccessFromServiceChain, Null); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (6, LAST_INSERT_ID(), 'Normalize service files for access'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @ResumeAferTranscodingPreservationAndAccessLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @ResumeAferTranscodingPreservationAndAccessLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @createThumbnailsDirectoryTask, 'Create thumbnails directory'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'createDirectory_v0.0', '-m 770 "%SIPDirectory%DIP/" "%SIPDirectory%DIP/objects/"'); +SET @createDIPDirectoryTask = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @createDIPDirectoryTask, 'Create DIP directory'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET 
@MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +-- move to processing directory -- +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, @moveToProcessingDirectoryTaskConfig, @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'Normalize service files for access'); +SET @normalizeAccessOnlyFromServiceMicroServiceChain = LAST_INSERT_ID(); +-- CREATE AIP AND DIP - Access normalization only -- + +-- GENERATE AIP -- + +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, @setFilePermissionsTaskConfig, @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @PrepareAIPMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @GenerateMETSxmldocumentTask, 'Generate METS.xml document'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES 
(@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @CopyTransfersmetadataandlogsTask, 'Copy transfers metadata and logs'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +-- disabling as it breaks the maildir format. -- +/* +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @RemoveEmptyDirectoriesTask, 'Remove empty directories'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; +*/ + +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (1, @VerifychecksumsgeneratedoningestTask, 'Verify checksums generated on ingest'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + 
+INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (1, @RemovefileswithoutlinkinginformationTask, 'Remove files without linking information (failed normalization artifacts etc.)'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +SET @microserviceGroup = 'Process submission documentation'; +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (6, @NormalizesubmissiondocumentationTask, 'Normalize submission documentation to preservation format'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (6, @NormalizesubmissiondocumentationThumbnailsTask, 'Normalize submission documentation to thumbnail format'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + 
+INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (1, @identifyFilesByExtensionsubmissiondocumentationmetadataTask, 'Identify Files ByExtension'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (1, @CharacterizeandextractsubmissiondocumentationmetadataTask, 'Characterize and extract metadata on submission documentation'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (1, @ScanforvirusesinsubmissiondocumentationTask, 'Scan for viruses in submission documentation'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + + +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + 
(0, @SanitizefileanddirectorynamesinsubmissiondocumentationTask, 'Sanitize file and directory names in submission documentation'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (1, @ExtractpackagesinsubmissiondocumentationTask, 'Extract packages in submission documentation'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (1, @AssignchecksumsandfilesizestosubmissiondocumentationTask, 'Assign checksums and file sizes to submissionDocumentation'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (1, @AssignfileUUIDstosubmissiondocumentationTask, 'Assign 
file UUIDs to submission documentation'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @MoveSubmissionDocumentationintoobjectsdirectoryTask, 'Move submission documentation into objects directory'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @CheckForSubmissionDocumentationTask, 'Check for submission documentation'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @MoveTransferSubmissionDocumentationTask, 'Copy transfer submission documentation'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + 
VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, @setFilePermissionsTaskConfig, @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, @moveToProcessingDirectoryTaskConfig, @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'Approve'); +SET @ProcessSubmissionDocumentationLinkMicroServiceChain = LAST_INSERT_ID(); + +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (1, @RemovefileswithoutlinkinginformationTask, 'Remove files without linking information (failed normalization artifacts etc.)'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES 
(@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +SET @microserviceGroup = 'Normalize'; +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, 'objects', FALSE, NULL, NULL, @NormalizeThumbnailsChain, Null); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (6, LAST_INSERT_ID(), 'Normalize thumbnails'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @createThumbnailsDirectoryTask, 'Create thumbnails directory'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink2 = @MicroServiceChainLink; + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'Do not normalize'); +SET @NoNormalizationLinkMicroServiceChain = LAST_INSERT_ID(); + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, '', ''); +INSERT INTO 
TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (2, LAST_INSERT_ID(), 'Approve normalization'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), NULL); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainChoice (choiceAvailableAtLink, chainAvailable) + VALUES + (@MicroServiceChainLink, @ProcessSubmissionDocumentationLinkMicroServiceChain); +INSERT INTO MicroServiceChainChoice (choiceAvailableAtLink, chainAvailable) + VALUES + (@MicroServiceChainLink, @rejectSIPMicroServiceChain2); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'approveNormalization'); +SET @MicroServiceChain = LAST_INSERT_ID(); + +INSERT INTO WatchedDirectories (watchedDirectoryPath, chain) + VALUES ('%watchDirectoryPath%approveNormalization/preservation/', @MicroServiceChain); + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'moveSIP_v0.0', '"%SIPDirectory%" "%sharedPath%watchedDirectories/approveNormalization/preservation/." 
"%SIPUUID%" "%sharedPath%" "%SIPUUID%" "%sharedPath%"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, LAST_INSERT_ID(), 'Move to approve normalization directory'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), NULL); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, NULL); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (1, @RemovefileswithoutlinkinginformationTask, 'Remove files without linking information (failed normalization artifacts etc.)'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, @setFilePermissionsTaskConfig, @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, 'objects', FALSE, NULL, NULL, @NormalizePreservationChain, Null); +INSERT INTO TasksConfigs 
(taskType, taskTypePKReference, description) + VALUES + (6, LAST_INSERT_ID(), 'Normalize preservation'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @createThumbnailsDirectoryTask, 'Create thumbnails directory'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'manual normalization'); +SET @ManualNormalizationMicroServiceChain = LAST_INSERT_ID(); + + + + + +-- move to processing directory -- +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, @moveToProcessingDirectoryTaskConfig, @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink2 = @MicroServiceChainLink; + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'Normalize for preservation'); +SET @CreateAIPMicroServiceChain = 
LAST_INSERT_ID(); +-- /CREATE AIP -- + + + + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, '', ''); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (2, LAST_INSERT_ID(), 'Normalize'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), NULL); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainChoice (choiceAvailableAtLink, chainAvailable) + VALUES + (@MicroServiceChainLink, @NoNormalizationLinkMicroServiceChain); +INSERT INTO MicroServiceChainChoice (choiceAvailableAtLink, chainAvailable) + VALUES + (@MicroServiceChainLink, @CreateDIPandAIPMicroServiceChain); +INSERT INTO MicroServiceChainChoice (choiceAvailableAtLink, chainAvailable) + VALUES + (@MicroServiceChainLink, @normalizeAccessOnlyMicroServiceChain); +INSERT INTO MicroServiceChainChoice (choiceAvailableAtLink, chainAvailable) + VALUES + (@MicroServiceChainLink, @CreateAIPMicroServiceChain); +INSERT INTO MicroServiceChainChoice (choiceAvailableAtLink, chainAvailable) + VALUES + (@MicroServiceChainLink, @normalizeAccessOnlyFromServiceMicroServiceChain); -- Normalize service files for access -- +-- Issue 741: Make manual normalization compatible with current data structure +-- INSERT INTO MicroServiceChainChoice (choiceAvailableAtLink, chainAvailable) +-- VALUES +-- (@MicroServiceChainLink, @ManualNormalizationMicroServiceChain); +INSERT INTO MicroServiceChainChoice (choiceAvailableAtLink, chainAvailable) + VALUES + (@MicroServiceChainLink, @rejectSIPMicroServiceChain); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; +SET @normalizeOptionsForASIPWITHOUTAccessDirectory = @MicroServiceChainLink; + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, 
standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, '', ''); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (2, LAST_INSERT_ID(), 'Normalize'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), NULL); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainChoice (choiceAvailableAtLink, chainAvailable) + VALUES + (@MicroServiceChainLink, @noNormalizationWithAccessDirectoryMicroServiceChain); +-- Issue 741: Make manual normalization compatible with current data structure +-- INSERT INTO MicroServiceChainChoice (choiceAvailableAtLink, chainAvailable) +-- VALUES +-- (@MicroServiceChainLink, @manualNormalizationWithAccessDirectoryMicroServiceChain); +INSERT INTO MicroServiceChainChoice (choiceAvailableAtLink, chainAvailable) + VALUES + (@MicroServiceChainLink, @normalizePreservationWithAccessDirectoryMicroServiceChain); +INSERT INTO MicroServiceChainChoice (choiceAvailableAtLink, chainAvailable) + VALUES + (@MicroServiceChainLink, @rejectSIPMicroServiceChain); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; +SET @normalizeOptionsForASIPWITHAccessDirectory = @MicroServiceChainLink; + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, NULL, NULL); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (4, LAST_INSERT_ID(), 'Find options to normalize as'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); + + + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'Create DIP ?'); +SET 
@MicroServiceChain = LAST_INSERT_ID(); + +INSERT INTO WatchedDirectories (watchedDirectoryPath, chain) + VALUES ('%watchDirectoryPath%workFlowDecisions/createDip/', @MicroServiceChain); + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'moveSIP_v0.0', '"%SIPDirectory%" "%sharedPath%watchedDirectories/workFlowDecisions/createDip/." "%SIPUUID%" "%sharedPath%" "%SIPUUID%" "%sharedPath%"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, LAST_INSERT_ID(), 'Move to workFlowDecisions-createDip directory'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), NULL); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, NULL); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, @normalizeOptionsForASIPWITHOUTAccessDirectory, NULL); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (3, LAST_INSERT_ID(), 'Grant normalization options for no pre-existing DIP'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NoDIPNextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO 
StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, @normalizeOptionsForASIPWITHAccessDirectory, NULL); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (3, LAST_INSERT_ID(), 'Grant normalization options for pre-existing DIP'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @ADIPNextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'checkForAccessDirectory_v0.0', '--SIPDirectory "%SIPDirectory%" --accessDirectory "objects/access/" --objectsDirectory "objects/" --DIPDirectory "DIP" --SIPUUID "%SIPUUID%" --date "%date%" -c'); +SET @checkForAccessDirectoryCopyToDIPTask = LAST_INSERT_ID(); +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'checkForAccessDirectory_v0.0', '--SIPDirectory "%SIPDirectory%" --accessDirectory "objects/access/" --objectsDirectory "objects/" --DIPDirectory "DIP" --SIPUUID "%SIPUUID%" --date "%date%"' ); +SET @checkForAccessDirectoryMoveToDIPTask = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @checkForAccessDirectoryMoveToDIPTask, 'Check for Access directory'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, 
currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NoDIPNextMicroServiceChainLink); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 179, @ADIPNextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'checkForServiceDirectory_v0.0', '--SIPDirectory "%SIPDirectory%" --serviceDirectory "objects/service/" --objectsDirectory "objects/" --SIPUUID "%SIPUUID%" --date "%date%"' ); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, LAST_INSERT_ID(), 'Check for Service directory'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +SET @microserviceGroup = 'Clean up names'; +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, '%SIPDirectory%SIPnameCleanup.log', 'sanitizeSIPName_v0.0', '"%relativeLocation%" "%SIPUUID%" "%date%" "%sharedPath%"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, 
LAST_INSERT_ID(), 'Sanitize SIP name'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +-- Removed to maintain maildir -- +/* +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, 'objects', FALSE, '%SIPLogsDirectory%filenameCleanup.log', '%SIPLogsDirectory%filenameCleanup.log', 'sanitizeObjectNames_v0.0', '"%SIPObjectsDirectory%" "%SIPUUID%" "%date%" "%taskUUID%" "sipDirectory" "sipUUID" "%SIPDirectory%"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, LAST_INSERT_ID(), 'Sanitize object\'s file and directory names'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; +*/ + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'setDirectoryPermissionsForAppraisal_v0.0', '"%SIPDirectory%"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, LAST_INSERT_ID(), 'Set file permissions'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, 
currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +SET @microserviceGroup = 'Remove cache files'; +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, 'objects', TRUE, '%SIPLogsDirectory%removeUnneededFiles.log', '%SIPLogsDirectory%removeUnneededFiles.log', 'removeUnneededFiles_v0.0', '"%relativeLocation%" "%fileUUID%" "%SIPLogsDirectory%" "%date%" "%taskUUID%"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (1, LAST_INSERT_ID(), 'Remove cache files'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +SET @microserviceGroup = 'Include default SIP processingMCP.xml'; +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'copy_v0.0', '"%sharedPath%sharedMicroServiceTasksConfigs/processingMCPConfigs/defaultProcessingMCP.xml" "%SIPDirectory%processingMCP.xml" -n'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, LAST_INSERT_ID(), 'Include default SIP processingMCP.xml'); +INSERT INTO 
MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +SET @microserviceGroup = 'Rename SIP directory with SIP UUID'; +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'moveSIP_v0.0', '"%SIPDirectory%" "%processingDirectory%%SIPName%-%SIPUUID%" "%SIPUUID%" "%sharedPath%"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, LAST_INSERT_ID(), 'Rename SIP directory with SIP UUID'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +SET @microserviceGroup = 'Failed compliance'; +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'moveSIP_v0.0', '"%SIPDirectory%" "%sharedPath%watchedDirectories/SIPCreation/SIPsUnderConstruction/." "%SIPUUID%" "%sharedPath%"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, LAST_INSERT_ID(), 'Failed compliance. See output in dashboard. 
SIP moved back to SIPsUnderConstruction'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), NULL); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, NULL); +SET @FailedSIPComplianceChainLink = @MicroServiceChainLink; + +SET @microserviceGroup = 'Verify transfer compliance'; +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'archivematicaVerifyMets_v0.0', '"%SIPDirectory%"'); +SET @archivematicaVerifyMetsTask = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @archivematicaVerifyMetsTask, 'Verify mets_structmap.xml compliance'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @FailedSIPComplianceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +SET @microserviceGroup = 'Verify SIP compliance'; +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'verifySIPCompliance_v0.0', '"%SIPDirectory%"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, LAST_INSERT_ID(), 'Verify SIP compliance'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, 
LAST_INSERT_ID(), @FailedSIPComplianceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +-- move to processing directory -- +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, @moveToProcessingDirectoryTaskConfig, @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +-- Set file permissions +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, @setFilePermissionsTaskConfig, @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'SIP Creation complete'); +SET @MicroServiceChain = LAST_INSERT_ID(); + +INSERT INTO WatchedDirectories (watchedDirectoryPath, chain, expectedType) + VALUES ('%watchDirectoryPath%system/autoProcessSIP', @MicroServiceChain, @expectedTypeSIP); + +SET @microserviceGroup = 'Approve SIP creation'; +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, '', ''); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (2, 
LAST_INSERT_ID(), 'Approve SIP Creation'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), NULL); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainChoice (choiceAvailableAtLink, chainAvailable) + VALUES + (@MicroServiceChainLink, @MicroServiceChain); +INSERT INTO MicroServiceChainChoice (choiceAvailableAtLink, chainAvailable) + VALUES + (@MicroServiceChainLink, @rejectSIPMicroServiceChain); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'SIP Creation'); +SET @MicroServiceChain = LAST_INSERT_ID(); + +INSERT INTO WatchedDirectories (watchedDirectoryPath, chain, expectedType) + VALUES ('%watchDirectoryPath%SIPCreation/SIPsUnderConstruction', @MicroServiceChain, @expectedTypeSIP); + + + + + + + + + + + + + + + +-- <transfer> -- + +-- Move To Failed Directory -- +SET @microserviceGroup = 'Failed transfer'; +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'moveTransfer_v0.0', '"%SIPDirectory%" "%sharedPath%failed/." 
"%SIPUUID%" "%sharedPath%" "%SIPUUID%" "%sharedPath%"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, LAST_INSERT_ID(), 'Move to the failed directory'); + +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink, defaultPlaySound) + VALUES (@microserviceGroup, LAST_INSERT_ID(), NULL, @errorSound); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, NULL); +SET @defaultNextChainLink = @MicroServiceChainLink; + +-- Move to processing directory -- +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'moveTransfer_v0.0', '"%SIPDirectory%" "%processingDirectory%." "%SIPUUID%" "%sharedPath%"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, LAST_INSERT_ID(), 'Move to processing directory'); +SET @moveToProcessingDirectoryTaskConfig = LAST_INSERT_ID();; + + +-- Reject Transfer -- +SET @microserviceGroup = 'Reject transfer'; +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'moveTransfer_v0.0', '"%SIPDirectory%" "%rejectedDirectory%." 
"%SIPUUID%" "%sharedPath%"'); + +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, LAST_INSERT_ID(), 'Move to the rejected directory'); + +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), NULL); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, NULL); +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'Reject transfer'); +SET @rejectSIPMicroServiceChain = LAST_INSERT_ID(); + +-- 'Create SIP from Transfer' +SET @microserviceGroup = 'Create SIP from Transfer'; +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'moveTransfer_v0.0', '"%SIPDirectory%" "%sharedPath%watchedDirectories/SIPCreation/completedTransfers/." 
"%SIPUUID%" "%sharedPath%"'); +SET @moveToCompletedTransferTaskTask = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @moveToCompletedTransferTaskTask, 'Move to completedTransfers directory'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), NULL); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, NULL); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'createSIPfromTransferObjects_v0.0', '"%SIPObjectsDirectory%" "%SIPName%" "%SIPUUID%" "%processingDirectory%" "%sharedPath%watchedDirectories/system/autoProcessSIP/" "%sharedPath%"'); +SET @moveToCompletedTransferTaskTask = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @moveToCompletedTransferTaskTask, 'Create SIP from transfer objects'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + + +-- move to processing directory -- +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, @moveToProcessingDirectoryTaskConfig, @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO 
MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'Create single SIP and continue processing'); +SET @MicroServiceChain = LAST_INSERT_ID(); + +SET @microserviceGroup = 'Create SIP from Transfer'; +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'moveTransfer_v0.0', '"%SIPDirectory%" "%sharedPath%watchedDirectories/SIPCreation/completedTransfers/." "%SIPUUID%" "%sharedPath%"'); +SET @moveToCompletedTransferTaskTask = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @moveToCompletedTransferTaskTask, 'Move to completedTransfers directory'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), NULL); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, NULL); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +SET @microserviceGroup = 'Create SIP from Transfer'; +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'moveTransfer_v0.0', '"%SIPDirectory%" "%sharedPath%watchedDirectories/SIPCreation/completedTransfers/." 
"%SIPUUID%" "%sharedPath%"'); +SET @moveToCompletedTransferTaskTask = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @moveToCompletedTransferTaskTask, 'Move to completedTransfers directory'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), NULL); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, NULL); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'createSIPfromTransferObjects_v0.0', '"%SIPObjectsDirectory%" "%SIPName%" "%SIPUUID%" "%processingDirectory%" "%sharedPath%watchedDirectories/SIPCreation/SIPsUnderConstruction/" "%sharedPath%"'); +SET @moveToCompletedTransferTaskTask = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @moveToCompletedTransferTaskTask, 'Create SIP from transfer objects'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + + +-- move to processing directory -- +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, @moveToProcessingDirectoryTaskConfig, @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT 
INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'Create single SIP'); +SET @MicroServiceChain2 = LAST_INSERT_ID(); + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (Null, 'Create SIP(s) manually'); +SET @MicroServiceChain3 = LAST_INSERT_ID(); + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, '', ''); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (2, LAST_INSERT_ID(), 'Create SIP(s)'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), NULL); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainChoice (choiceAvailableAtLink, chainAvailable) + VALUES + (@MicroServiceChainLink, @MicroServiceChain); +/*INSERT INTO MicroServiceChainChoice (choiceAvailableAtLink, chainAvailable) + VALUES + (@MicroServiceChainLink, @MicroServiceChain2);*/ +INSERT INTO MicroServiceChainChoice (choiceAvailableAtLink, chainAvailable) + VALUES + (@MicroServiceChainLink, @MicroServiceChain3); +INSERT INTO MicroServiceChainChoice (choiceAvailableAtLink, chainAvailable) + VALUES + (@MicroServiceChainLink, @rejectSIPMicroServiceChain); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +-- check for objects +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'checkTransferDirectoryForObjects_v0.0', '"%SIPObjectsDirectory%"'); +INSERT INTO 
TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, LAST_INSERT_ID(), 'Check transfer directory for objects'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), NULL); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, NULL); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 179, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'Check transfer directory for objects'); +SET @MicroServiceChain = LAST_INSERT_ID(); + +INSERT INTO WatchedDirectories (watchedDirectoryPath, chain, expectedType) + VALUES ('%watchDirectoryPath%SIPCreation/completedTransfers/', @MicroServiceChain, @expectedTypeTransfer); + +SET @microserviceGroup = 'Complete transfer'; +-- transfer processing complete -- +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'elasticSearchIndex_v0.0', '"%SIPDirectory%" "%SIPUUID%"'); +SET @elasticSearchIndexTaskTask = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @elasticSearchIndexTaskTask, 'Index transfer contents'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), NULL); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, NULL); +SET @NextMicroServiceChainLink 
= @MicroServiceChainLink; + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'moveTransfer_v0.0', '"%SIPDirectory%" "%sharedPath%watchedDirectories/SIPCreation/completedTransfers/." "%SIPUUID%" "%sharedPath%"'); +SET @moveToCompletedTransferTaskTask = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @moveToCompletedTransferTaskTask, 'Move to completedTransfers directory'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), NULL); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, @setFilePermissionsTaskConfig, @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + + +SET @microserviceGroup = 'Characterize and extract metadata'; +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'loadLabelsFromCSV_v0.0', '"%SIPUUID%" "%SIPDirectory%metadata/file_labels.csv"'); +SET @loadLabelsFromCSVTask = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @loadLabelsFromCSVTask, 'Load 
labels from metadata/file_labels.csv'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, 'objects', FALSE, NULL, NULL, 'identifyFilesByExtension_v0.0', '"%relativeLocation%" "%fileUUID%"'); +SET @identifyFilesByExtensionTask = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (1, @identifyFilesByExtensionTask, 'Identify Files ByExtension'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, 'objects', FALSE, NULL, NULL, 'FITS_v0.0', '"%relativeLocation%" "%SIPLogsDirectory%fileMeta/%fileUUID%.xml" "%date%" "%taskUUID%" "%fileUUID%" "%fileGrpUse%"'); +SET @CharacterizeandextractmetadataTask = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (1, @CharacterizeandextractmetadataTask, 'Characterize and extract metadata'); +INSERT 
INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +SET @microserviceGroup = 'Clean up names'; +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, '%SIPDirectory%SIPnameCleanup.log', 'sanitizeSIPName_v0.0', '"%relativeLocation%" "%SIPUUID%" "%date%" "%sharedPath%"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, LAST_INSERT_ID(), 'Sanitize Transfer name'); +SET @SanitizeTransferNameTask = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, 'objects', FALSE, '%SIPLogsDirectory%filenameCleanup.log', '%SIPLogsDirectory%filenameCleanup.log', 'sanitizeObjectNames_v0.0', '"%SIPObjectsDirectory%" "%SIPUUID%" "%date%" "%taskUUID%" "transferDirectory" "transferUUID" "%SIPDirectory%"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, LAST_INSERT_ID(), 'Sanitize object\'s file and 
directory names'); +SET @SanitizeTransferObjectsTask = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +SET @microserviceGroup = 'Scan for viruses'; +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, '%SIPLogsDirectory%clamAVScan.txt', 'archivematicaClamscan_v0.0', '"%fileUUID%" "%relativeLocation%" "%date%" "%taskUUID%"'); +SET @ScanforvirusesTask = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (1, @ScanforvirusesTask, 'Scan for viruses'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +SET @microserviceGroup = 'Extract packages'; +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, 'objects', FALSE, '%SIPLogsDirectory%extraction.log', '%SIPLogsDirectory%extraction.log', 'transcoderExtractPackages_v0.0', '"%relativeLocation%" "%SIPDirectory%" "%SIPUUID%" "%date%" "%taskUUID%" "%fileUUID%"'); +SET @ExtractpackagesTask = LAST_INSERT_ID(); 
+INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (1, @ExtractpackagesTask, 'Extract packages'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +-- move to processing directory -- +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, @moveToProcessingDirectoryTaskConfig, @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +-- SET @NextMicroServiceChainLink = @MicroServiceChainLink; -- + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'Skip quarantine'); +SET @SkipQuarantineMicroServiceChain = LAST_INSERT_ID(); + +-- quarantine -- +SET @microserviceGroup = 'Quarantine'; +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES -- --eventDetail --eventOutcome --eventOutcomeDetailNote -- + (NULL, NULL, 'objects', FALSE, NULL, NULL, 'createEvent_v0.0', '--eventType "unquarantine" --fileUUID "%fileUUID%" --eventIdentifierUUID "%taskUUID%" --eventDateTime "%jobCreatedDate%"'); +SET @CreateunquarantinedPREMISeventsTask = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (1, @CreateunquarantinedPREMISeventsTask, 'Create unquarantine PREMIS events'); +INSERT INTO MicroServiceChainLinks 
(microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +-- move to processing directory -- +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, @moveToProcessingDirectoryTaskConfig, @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'setFilePermission_v0.0', ' -R 750 "%relativeLocation%"'); +SET @SetunquarantinedfilepermissionsonTransferTask = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @SetunquarantinedfilepermissionsonTransferTask, 'Set unquarantined file permissions on Transfer'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES 
(@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'SIP Creation complete'); +SET @MicroServiceChain = LAST_INSERT_ID(); + +-- -- +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'Unquarantine'); +SET @MicroServiceChain = LAST_INSERT_ID(); + + +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (2, @RemovefromquarantineTask, 'Remove from quarantine'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), NULL); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainChoice (choiceAvailableAtLink, chainAvailable) + VALUES + (@MicroServiceChainLink, @MicroServiceChain); +INSERT INTO MicroServiceChainChoice (choiceAvailableAtLink, chainAvailable) + VALUES + (@MicroServiceChainLink, @rejectSIPMicroServiceChain); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; +SET @unquarantineDefaultTransferMagicChainLink = @MicroServiceChainLink; + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, NULL, NULL); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (4, LAST_INSERT_ID(), 'Find type to remove from quarantine as'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); + + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'Unquarantine'); +SET @MicroServiceChain = LAST_INSERT_ID(); + +INSERT INTO WatchedDirectories (watchedDirectoryPath, chain, 
expectedType) + VALUES ('%watchDirectoryPath%quarantined/', @MicroServiceChain, @expectedTypeTransfer); + +-- Remove from quarantine ^ -- +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'moveTransfer_v0.0', '"%SIPDirectory%" "%watchDirectoryPath%quarantined/." "%SIPUUID%" "%sharedPath%"'); +SET @MovetoquarantinedTask = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @MovetoquarantinedTask, 'Move to quarantine'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), NULL); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, NULL); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, @unquarantineDefaultTransferMagicChainLink, NULL); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (3, LAST_INSERT_ID(), 'Designate to process as a standard transfer when unquarantined'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, 
requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES -- --eventDetail --eventOutcome --eventOutcomeDetailNote -- + (NULL, NULL, 'objects', FALSE, NULL, NULL, 'createEvent_v0.0', '--eventType "quarantine" --fileUUID "%fileUUID%" --eventIdentifierUUID "%taskUUID%" --eventDateTime "%jobCreatedDate%"'); +SET @CreatequarantinedPREMISeventsTask= LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (1, @CreatequarantinedPREMISeventsTask , 'Create quarantine PREMIS events'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'setFilePermission_v0.0', '700 -R "%relativeLocation%"'); +SET @SetquarantinepermissionsontransferTask = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @SetquarantinepermissionsontransferTask, 'Set quarantine permissions on transfer'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO MicroServiceChains (startingLink, 
description) VALUES (@MicroServiceChainLink, 'Quarantine'); +SET @QuarantineMicroServiceChain = LAST_INSERT_ID(); + + +-- /quarantine -- + + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, '', ''); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (2, LAST_INSERT_ID(), 'Workflow decision - send transfer to quarantine'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), NULL); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainChoice (choiceAvailableAtLink, chainAvailable) + VALUES + (@MicroServiceChainLink, @QuarantineMicroServiceChain); +INSERT INTO MicroServiceChainChoice (choiceAvailableAtLink, chainAvailable) + VALUES + (@MicroServiceChainLink, @SkipQuarantineMicroServiceChain); +INSERT INTO MicroServiceChainChoice (choiceAvailableAtLink, chainAvailable) + VALUES + (@MicroServiceChainLink, @rejectSIPMicroServiceChain); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; +SET @WorkflowdecisionsendtransfertoquarantineDefaultTransferMagicChainLink = @MicroServiceChainLink; + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, NULL, NULL); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (4, LAST_INSERT_ID(), 'Find type to process as'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'quarantineSIP ?'); +SET 
@MicroServiceChain = LAST_INSERT_ID(); + +INSERT INTO WatchedDirectories (watchedDirectoryPath, chain, expectedType) + VALUES ('%watchDirectoryPath%workFlowDecisions/quarantineSIP/', @MicroServiceChain, @expectedTypeTransfer); + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'moveTransfer_v0.0', '"%SIPDirectory%" "%sharedPath%watchedDirectories/workFlowDecisions/quarantineSIP/." "%SIPUUID%" "%sharedPath%" "%SIPUUID%" "%sharedPath%"'); +Set @MovetoworkFlowDecisionsquarantineSIPdirectory = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @MovetoworkFlowDecisionsquarantineSIPdirectory, 'Move to workFlowDecisions-quarantineSIP directory'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), NULL); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, NULL); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, @WorkflowdecisionsendtransfertoquarantineDefaultTransferMagicChainLink, NULL); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (3, LAST_INSERT_ID(), 'Designate to process as a standard transfer'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, 
nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +SET @microserviceGroup = 'Generate METS.xml document'; +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'createMETS_v0.0', '--sipUUID "%SIPUUID%" --basePath "%SIPDirectory%" --xmlFile "%SIPLogsDirectory%"METS.xml --basePathString "transferDirectory" --fileGroupIdentifier "transferUUID"'); +SET @GenerateMETSxmldocument = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @GenerateMETSxmldocument, 'Generate METS.xml document'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; +SET @bagitJoinStandardProcessingLink = @MicroServiceChainLink; + +SET @microserviceGroup = 'Verify transfer checksums'; +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'verifyMD5_v0.0', '"%relativeLocation%" "%checksumsNoExtention%" "%date%" "%taskUUID%" "%SIPUUID%"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, LAST_INSERT_ID(), 'Verify metadata directory checksums'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET 
@MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +SET @microserviceGroup = 'Assign file UUIDs and checksums'; +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, 'objects', FALSE, NULL, NULL, 'updateSizeAndChecksum_v0.0', '--filePath "%relativeLocation%" --fileUUID "%fileUUID%" --eventIdentifierUUID "%taskUUID%" --date "%date%"'); +SET @AssignchecksumsandfilesizestoobjectsTask = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (1, @AssignchecksumsandfilesizestoobjectsTask, 'Assign checksums and file sizes to objects'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, 'objects', TRUE, '%SIPLogsDirectory%FileUUIDs.log', '%SIPLogsDirectory%FileUUIDsError.log', 'assignFileUUIDs_v0.0', '--transferUUID "%SIPUUID%" --sipDirectory "%SIPDirectory%" --filePath "%relativeLocation%" --fileUUID "%fileUUID%" --eventIdentifierUUID "%taskUUID%" --date "%date%"'); +SET @AssignfileUUIDstoobjects = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (1, 
@AssignfileUUIDstoobjects, 'Assign file UUIDs to objects'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, @setFilePermissionsTaskConfig, @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +-- move to processing directory -- +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, @moveToProcessingDirectoryTaskConfig, @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'Do not backup transfer'); +SET @DoNotCreateTransferBackupMicroServiceChain = LAST_INSERT_ID(); + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'copy_v0.0', '"%SIPDirectory%" "%sharedPath%transferBackups/." 
-R --preserve'); +SET @createTransferBackupTask = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @createTransferBackupTask, 'Create transfer backup (sharedDirectory/transferBackups)'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +-- move to processing directory -- +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, @moveToProcessingDirectoryTaskConfig, @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'Backup transfer'); +SET @CreateTransferBackupMicroServiceChain = LAST_INSERT_ID(); + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, '', ''); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (2, LAST_INSERT_ID(), 'Workflow decision - create transfer backup'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), NULL); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainChoice (choiceAvailableAtLink, 
chainAvailable) + VALUES + (@MicroServiceChainLink, @CreateTransferBackupMicroServiceChain); +INSERT INTO MicroServiceChainChoice (choiceAvailableAtLink, chainAvailable) + VALUES + (@MicroServiceChainLink, @DoNotCreateTransferBackupMicroServiceChain); +INSERT INTO MicroServiceChainChoice (choiceAvailableAtLink, chainAvailable) + VALUES + (@MicroServiceChainLink, @rejectSIPMicroServiceChain); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; +SET @WorkflowdecisioncreateTransferBackupDefaultTransferMagicChainLink = @MicroServiceChainLink; + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, NULL, NULL); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (4, LAST_INSERT_ID(), 'Find type to process as'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'create transfer backup ?'); +SET @MicroServiceChain = LAST_INSERT_ID(); + +INSERT INTO WatchedDirectories (watchedDirectoryPath, chain, expectedType) + VALUES ('%watchDirectoryPath%workFlowDecisions/createTransferBackup/', @MicroServiceChain, @expectedTypeTransfer); + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'moveTransfer_v0.0', '"%SIPDirectory%" "%sharedPath%watchedDirectories/workFlowDecisions/createTransferBackup/." 
"%SIPUUID%" "%sharedPath%" "%SIPUUID%" "%sharedPath%"'); +Set @MovetoworkFlowDecisionsquarantineSIPdirectory = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @MovetoworkFlowDecisionsquarantineSIPdirectory, 'Move to workFlowDecisions-createTransferBackup directory'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), NULL); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, NULL); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, @WorkflowdecisioncreateTransferBackupDefaultTransferMagicChainLink, NULL); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (3, LAST_INSERT_ID(), 'Designate to process as a standard transfer'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +SET @microserviceGroup = 'Include default Transfer processingMCP.xml'; +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'copy_v0.0', '"%sharedPath%sharedMicroServiceTasksConfigs/processingMCPConfigs/defaultProcessingMCP.xml" 
"%SIPDirectory%processingMCP.xml" -n'); +SET @IncludedefaultTransferprocessingMCPxmlTask = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @IncludedefaultTransferprocessingMCPxmlTask, 'Include default Transfer processingMCP.xml'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +-- Rename with transfer UUID -- +SET @microserviceGroup = 'Rename with transfer UUID'; +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'moveTransfer_v0.0', '"%SIPDirectory%" "%processingDirectory%%SIPName%-%SIPUUID%" "%SIPUUID%" "%sharedPath%"'); +SET @RenamewithtransferUUIDTask = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @RenamewithtransferUUIDTask, 'Rename with transfer UUID'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; +SET @PassedComplianceMicroServiceChainLink = @MicroServiceChainLink; + + + +SET @microserviceGroup = 'Failed transfer compliance'; +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, 
requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'moveTransfer_v0.0', '"%SIPDirectory%" "%sharedPath%watchedDirectories/activeTransfers/standardTransfer/." "%SIPUUID%" "%sharedPath%"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, LAST_INSERT_ID(), 'Move transfer back to activeTransfers directory.'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), NULL); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, NULL); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'Move transfer back to activeTransfers directory'); +SET @MovetransferbacktoactiveTransfersdirectoryMicroServiceChain = LAST_INSERT_ID(); + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'restructureForCompliance_v0.0', '"%SIPDirectory%"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, LAST_INSERT_ID(), 'Attempt restructure for compliance'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +-- move to processing directory -- +INSERT INTO MicroServiceChainLinks 
(microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, @moveToProcessingDirectoryTaskConfig, @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'Attempt restructure for compliance'); +SET @restructureForComplianceMicroServiceChain = LAST_INSERT_ID(); + + +-- choice auto restructure for compliance -- +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, '', ''); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (2, LAST_INSERT_ID(), 'Attempt restructure for compliance?'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), NULL); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainChoice (choiceAvailableAtLink, chainAvailable) + VALUES + (@MicroServiceChainLink, @restructureForComplianceMicroServiceChain); +INSERT INTO MicroServiceChainChoice (choiceAvailableAtLink, chainAvailable) + VALUES + (@MicroServiceChainLink, @MovetransferbacktoactiveTransfersdirectoryMicroServiceChain); +INSERT INTO MicroServiceChainChoice (choiceAvailableAtLink, chainAvailable) + VALUES + (@MicroServiceChainLink, @rejectSIPMicroServiceChain); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; +SET @failedComplianceRestructureMagicChainLink = @MicroServiceChainLink; + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, 
arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, NULL, NULL); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (4, LAST_INSERT_ID(), 'Find branch to continue processing'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); + + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'Failed compliance'); +SET @MicroServiceChain = LAST_INSERT_ID(); + +INSERT INTO WatchedDirectories (watchedDirectoryPath, chain, expectedType) + VALUES ('%watchDirectoryPath%system/autoRestructureForCompliance/', @MicroServiceChain, @expectedTypeTransfer); + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'moveTransfer_v0.0', '"%SIPDirectory%" "%sharedPath%watchedDirectories/system/autoRestructureForCompliance/." 
"%SIPUUID%" "%sharedPath%"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, LAST_INSERT_ID(), 'Failed compliance.'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), NULL); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, NULL); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, @failedComplianceRestructureMagicChainLink, NULL); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (3, LAST_INSERT_ID(), 'Designate to process as a standard transfer'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +SET @FailTransferComplianceChainLink = @MicroServiceChainLink; + +SET @microserviceGroup = 'Verify transfer compliance'; +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @archivematicaVerifyMetsTask, 'Verify mets_structmap.xml compliance'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @FailTransferComplianceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, 
nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @PassedComplianceMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'verifyTransferCompliance_v0.0', '"%SIPDirectory%"'); +SET @verifyTransferComplianceTask = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @verifyTransferComplianceTask, 'Verify transfer compliance'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @FailTransferComplianceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'restructureForCompliance_v0.0', '"%SIPDirectory%"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, LAST_INSERT_ID(), 'Attempt restructure for compliance'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO StandardTasksConfigs (filterFileEnd, 
filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'removeHiddenFilesAndDirectories_v0.0', '"%SIPDirectory%"'); +SET @removeHiddenFilesAndDirectoriesTask = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @removeHiddenFilesAndDirectoriesTask, 'Remove hidden files and directories'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'archivematicaSetTransferType_v0.0', '"%SIPUUID%" "Standard"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, LAST_INSERT_ID(), 'Set transfer type: Standard'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +-- move to processing directory -- +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, @moveToProcessingDirectoryTaskConfig, @defaultNextChainLink); +SET @MicroServiceChainLink 
= LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, @setFilePermissionsTaskConfig, @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'Approve transfer'); +SET @MicroServiceChain = LAST_INSERT_ID(); + + +SET @microserviceGroup = 'Approve transfer'; +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, '', ''); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (2, LAST_INSERT_ID(), 'Approve transfer'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), NULL); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainChoice (choiceAvailableAtLink, chainAvailable) + VALUES + (@MicroServiceChainLink, @MicroServiceChain); +INSERT INTO MicroServiceChainChoice (choiceAvailableAtLink, chainAvailable) + VALUES + (@MicroServiceChainLink, @rejectSIPMicroServiceChain); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'Transfers In progress'); +SET @MicroServiceChain = LAST_INSERT_ID(); + +INSERT INTO WatchedDirectories 
(watchedDirectoryPath, chain, expectedType) + VALUES ('%watchDirectoryPath%activeTransfers/standardTransfer', @MicroServiceChain, @expectedTypeTransfer); + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +-- DSPACE TRANSFER -- +-- transfer processing complete -- +SET @microserviceGroup = 'Complete transfer'; +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @moveToCompletedTransferTaskTask, 'Move to completedTransfers directory'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), NULL); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, NULL); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, @setFilePermissionsTaskConfig, @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +SET @microserviceGroup = 'Characterize and extract metadata'; +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @loadLabelsFromCSVTask, 'Load labels from metadata/file_labels.csv'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = 
@MicroServiceChainLink; + + +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (1, @identifyFilesByExtensionTask, 'Identify Files ByExtension'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (1, @CharacterizeandextractmetadataTask, 'Characterize and extract metadata on objects'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +SET @microserviceGroup = 'Clean up names'; +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, @SanitizeTransferNameTask, @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, @SanitizeTransferObjectsTask, @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO 
MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +/* +SET @microserviceGroup = 'Transfer arrangement'; +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + ('mets.xml', NULL, 'objects', FALSE, NULL, NULL, 'moveDspaceMetsFilesToDSpaceMETS_v0.0', '"%relativeLocation%" "%date%" "%taskUUID%" "%SIPDirectory%" "%SIPUUID%"'); +-- ('mets.xml', NULL, 'objects', FALSE, NULL, NULL, 'moveDspaceMetsFilesToDSpaceMETS_v0.0', '"%relativeLocation%" "%date%" "%taskUUID%" "%SIPDirectory%" "%fileUUID%"') +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (1, LAST_INSERT_ID(), 'Move DSpace Mets.xml files to DSpaceMETS directory'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; +*/ + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + ('mets.xml', NULL, 'objects', FALSE, NULL, NULL, 'identifyDspaceMETSFiles_v0.0', '"%fileUUID%"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (1, LAST_INSERT_ID(), 'Identify DSpace mets files'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = 
LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + ('mets.xml', NULL, 'objects', FALSE, NULL, NULL, 'identifyDspaceLicenseFiles_v0.0', '"%relativeLocation%" "%date%" "%taskUUID%" "%SIPDirectory%" "%SIPUUID%"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (1, LAST_INSERT_ID(), 'Identify DSpace license files'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + ('mets.xml', NULL, 'objects', FALSE, NULL, NULL, 'identifyDspaceTextFiles_v0.0', '"%relativeLocation%" "%date%" "%taskUUID%" "%SIPDirectory%" "%SIPUUID%"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (1, LAST_INSERT_ID(), 'Identify DSpace text files'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, 
@NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +SET @microserviceGroup = 'Verify transfer checksums'; +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + ('mets.xml', NULL, 'objects', TRUE, '%SIPLogsDirectory%verifyChecksumsInFileSecOfDSpaceMETSFiles.log', '%SIPLogsDirectory%verifyChecksumsInFileSecOfDSpaceMETSFiles.log', 'verifyChecksumsInFileSecOfDspaceMETSFiles_v0.0', '"%relativeLocation%" "%date%" "%taskUUID%"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (1, LAST_INSERT_ID(), 'Verify checksums in fileSec of DSpace METS files'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +SET @microserviceGroup = 'Scan for viruses'; +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (1, @ScanforvirusesTask, 'Scan for viruses'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +SET @microserviceGroup = 'Extract packages'; +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (1, @ExtractpackagesTask, 'Extract packages'); +INSERT INTO 
MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +-- move to processing directory -- +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, @moveToProcessingDirectoryTaskConfig, @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +-- SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'Skip quarantine'); +SET @SkipQuarantineMicroServiceChain = LAST_INSERT_ID(); + +-- quarantine -- +SET @microserviceGroup = 'Quarantine'; +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (1, @CreateunquarantinedPREMISeventsTask, 'Create unquarantine PREMIS events'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +-- move to processing directory -- +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, @moveToProcessingDirectoryTaskConfig, @defaultNextChainLink); +SET 
@MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @SetunquarantinedfilepermissionsonTransferTask, 'Set unquarantined file permissions on Transfer'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, @moveToProcessingDirectoryTaskConfig, @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'SIP Creation complete'); +SET @MicroServiceChain = LAST_INSERT_ID(); + +-- -- +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'Unquarantine'); +SET @MicroServiceChain = LAST_INSERT_ID(); + + +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (2, @RemovefromquarantineTask, 'Remove from quarantine'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), NULL); +SET 
@MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainChoice (choiceAvailableAtLink, chainAvailable) + VALUES + (@MicroServiceChainLink, @MicroServiceChain); +INSERT INTO MicroServiceChainChoice (choiceAvailableAtLink, chainAvailable) + VALUES + (@MicroServiceChainLink, @rejectSIPMicroServiceChain); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; +SET @unquarantineDefaultTransferMagicChainLink = @MicroServiceChainLink; + + +-- Remove from quarantine ^ -- + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'moveTransfer_v0.0', '"%SIPDirectory%" "%watchDirectoryPath%quarantined/." "%SIPUUID%" "%sharedPath%"'); +SET @MovetoquarantinedTask = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @MovetoquarantinedTask, 'Move to quarantined'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), NULL); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, NULL); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, @unquarantineDefaultTransferMagicChainLink, NULL); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (3, LAST_INSERT_ID(), 'Designate to process as a DSpace transfer when unquarantined'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET 
@MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (1, @CreatequarantinedPREMISeventsTask , 'Create quarantine PREMIS events'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @SetquarantinepermissionsontransferTask, 'Set quarantine permissions on transfer'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'Quarantine'); +SET @QuarantineMicroServiceChain = LAST_INSERT_ID(); + + +-- /quarantine -- + + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, '', ''); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (2, 
LAST_INSERT_ID(), 'Workflow decision - send transfer to quarantine'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), NULL); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainChoice (choiceAvailableAtLink, chainAvailable) + VALUES + (@MicroServiceChainLink, @QuarantineMicroServiceChain); +INSERT INTO MicroServiceChainChoice (choiceAvailableAtLink, chainAvailable) + VALUES + (@MicroServiceChainLink, @SkipQuarantineMicroServiceChain); +INSERT INTO MicroServiceChainChoice (choiceAvailableAtLink, chainAvailable) + VALUES + (@MicroServiceChainLink, @rejectSIPMicroServiceChain); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; +SET @WorkflowdecisionsendtransfertoquarantineDspace17TransferMagicChainLink = @NextMicroServiceChainLink; + + + + +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @MovetoworkFlowDecisionsquarantineSIPdirectory, 'Move to workFlowDecisions-quarantineSIP directory'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), NULL); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, NULL); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, @WorkflowdecisionsendtransfertoquarantineDspace17TransferMagicChainLink, NULL); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (3, LAST_INSERT_ID(), 'Designate to process as a DSpace transfer'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + 
VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +SET @microserviceGroup = 'Generate METS.xml'; +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'createMETS_v0.0', '--sipUUID "%SIPUUID%" --basePath "%SIPDirectory%" --xmlFile "%SIPLogsDirectory%"METS.xml --basePathString "transferDirectory" --fileGroupIdentifier "transferUUID"'); +SET @GenerateMETSxmldocument = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @GenerateMETSxmldocument, 'Generate METS.xml document'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +SET @microserviceGroup = 'Verify transfer checksum'; +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'verifyMD5_v0.0', '"%relativeLocation%" "%checksumsNoExtention%" "%date%" "%taskUUID%" "%SIPUUID%"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, LAST_INSERT_ID(), 'Verify metadata directory checksums'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, 
currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +SET @microserviceGroup = 'Assign file UUIDs and checksums'; +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (1, @AssignchecksumsandfilesizestoobjectsTask, 'Assign checksums and file sizes to objects'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + + +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (1, @AssignfileUUIDstoobjects, 'Assign file UUIDs to objects'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, @setFilePermissionsTaskConfig, @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES 
(@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +SET @microserviceGroup = 'Include default Transfer processingMCP.xml'; +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @IncludedefaultTransferprocessingMCPxmlTask, 'Include default Transfer processingMCP.xml'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @createTransferBackupTask, 'Create transfer backup (sharedDirectory/transferBackups)'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +-- Rename with transfer UUID -- +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @RenamewithtransferUUIDTask, 'Rename with transfer UUID'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); 
+SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'moveTransfer_v0.0', '"%SIPDirectory%" "%sharedPath%watchedDirectories/activeTransfers/Dspace/." "%SIPUUID%" "%sharedPath%"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, LAST_INSERT_ID(), 'Failed compliance. See output in dashboard. Transfer moved back to activeTransfers.'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), NULL); +SET @FailTransferComplianceChainLink = LAST_INSERT_ID(); + +SET @microserviceGroup = 'Verify transfer compliance'; +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @archivematicaVerifyMetsTask, 'Verify mets_structmap.xml compliance'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @FailTransferComplianceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @verifyTransferComplianceTask, 'Verify transfer compliance'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @FailTransferComplianceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, 
@NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'archivematicaSetTransferType_v0.0', '"%SIPUUID%" "Dspace"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, LAST_INSERT_ID(), 'Set transfer type: DSpace'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +-- move to processing directory -- +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, @moveToProcessingDirectoryTaskConfig, @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, @setFilePermissionsTaskConfig, @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 
'Approve transfer'); +SET @MicroServiceChain = LAST_INSERT_ID(); + +SET @microserviceGroup = 'Approve transfer'; +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, '', ''); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (2, LAST_INSERT_ID(), 'Approve DSpace transfer'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), NULL); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainChoice (choiceAvailableAtLink, chainAvailable) + VALUES + (@MicroServiceChainLink, @MicroServiceChain); +INSERT INTO MicroServiceChainChoice (choiceAvailableAtLink, chainAvailable) + VALUES + (@MicroServiceChainLink, @rejectSIPMicroServiceChain); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'DSpace Transfers In progress'); +SET @MicroServiceChain = LAST_INSERT_ID(); + +INSERT INTO WatchedDirectories (watchedDirectoryPath, chain, expectedType) + VALUES ('%watchDirectoryPath%activeTransfers/Dspace', @MicroServiceChain, @expectedTypeTransfer); + +-- /DSPACE TRANSFER -- + + +-- BAGIT Transfer +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @IncludedefaultTransferprocessingMCPxmlTask, 'Include default Transfer processingMCP.xml'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @bagitJoinStandardProcessingLink); +SET @NextMicroServiceChainLink = 
@MicroServiceChainLink; + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, TRUE, NULL, NULL, 'verifyAndRestructureTransferBag_v0.0', '"%SIPDirectory%" "%SIPUUID%"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, LAST_INSERT_ID(), 'Verify bag, and restructure for compliance'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +-- Assign file uuid's +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'updateSizeAndChecksum_v0.0', '--filePath "%relativeLocation%" --fileUUID "%fileUUID%" --eventIdentifierUUID "%taskUUID%" --date "%date%"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (1, LAST_INSERT_ID(), 'Assign checksums and file sizes to objects'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, 
standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'assignFileUUIDs_v0.0', '--transferUUID "%SIPUUID%" --sipDirectory "%SIPDirectory%" --filePath "%relativeLocation%" --fileUUID "%fileUUID%" --eventIdentifierUUID "%taskUUID%" --date "%date%"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (1, LAST_INSERT_ID(), 'Assign file UUIDs to objects'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +-- Rename with transfer UUID -- +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @RenamewithtransferUUIDTask, 'Rename with transfer UUID'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; +SET @chainLinkAfterZippedBagitTransferExtracted = @MicroServiceChainLink; + +-- move to processing directory -- +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, @moveToProcessingDirectoryTaskConfig, @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); 
+SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, @setFilePermissionsTaskConfig, @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'Approve transfer'); +SET @MicroServiceChain = LAST_INSERT_ID(); + + +SET @microserviceGroup = 'Approve transfer'; +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, '', ''); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (2, LAST_INSERT_ID(), 'Approve transfer'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), NULL); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainChoice (choiceAvailableAtLink, chainAvailable) + VALUES + (@MicroServiceChainLink, @MicroServiceChain); +INSERT INTO MicroServiceChainChoice (choiceAvailableAtLink, chainAvailable) + VALUES + (@MicroServiceChainLink, @rejectSIPMicroServiceChain); +-- SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'baggitDirectory Transfers In progress'); +SET @MicroServiceChain = LAST_INSERT_ID(); + +-- watchedDirectories/activeTransfers/baggitDirectory +INSERT INTO WatchedDirectories (watchedDirectoryPath, chain, expectedType) + VALUES ('%watchDirectoryPath%activeTransfers/baggitDirectory', @MicroServiceChain, 
@expectedTypeTransfer); + +-- extractBag +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, TRUE, NULL, NULL, 'extractBagTransfer_v0.0', '"%SIPDirectory%" "%SIPUUID%" "%processingDirectory%" %sharedPath%'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, LAST_INSERT_ID(), 'Extract zipped bag transfer'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @chainLinkAfterZippedBagitTransferExtracted); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, @setFilePermissionsTaskConfig, @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'Approve transfer'); +SET @MicroServiceChain = LAST_INSERT_ID(); + + +SET @microserviceGroup = 'Approve transfer'; +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, '', ''); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (2, LAST_INSERT_ID(), 'Approve bagit transfer'); +INSERT INTO MicroServiceChainLinks 
(microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), NULL); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainChoice (choiceAvailableAtLink, chainAvailable) + VALUES + (@MicroServiceChainLink, @MicroServiceChain); +INSERT INTO MicroServiceChainChoice (choiceAvailableAtLink, chainAvailable) + VALUES + (@MicroServiceChainLink, @rejectSIPMicroServiceChain); +-- SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'baggitZippedFile Transfers In progress'); +SET @MicroServiceChain = LAST_INSERT_ID(); + +-- watchedDirectories/activeTransfers/baggitZippedDirectory +INSERT INTO WatchedDirectories (watchedDirectoryPath, chain, expectedType, onlyActOnDirectories) + VALUES ('%watchDirectoryPath%activeTransfers/baggitZippedDirectory', @MicroServiceChain, @expectedTypeTransfer, FALSE); +-- /BAGIT Transfer + + + + + + + + + + + + + +-- MAILDIR transfer +SET @microserviceGroup = 'Complete transfer'; +-- transfer processing complete -- +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @moveToCompletedTransferTaskTask, 'Move to completedTransfers directory'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), NULL); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, NULL); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, @setFilePermissionsTaskConfig, @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, 
nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + + +SET @microserviceGroup = 'Characterize and extract metadata'; + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, 'objects/attachments', FALSE, NULL, NULL, 'identifyFilesByExtension_v0.0', '"%relativeLocation%" "%fileUUID%"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (1, LAST_INSERT_ID(), 'Identify Files ByExtension'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, 'objects/attachments', FALSE, NULL, NULL, 'FITS_v0.0', '"%relativeLocation%" "%SIPLogsDirectory%fileMeta/%fileUUID%.xml" "%date%" "%taskUUID%" "%fileUUID%" "%fileGrpUse%"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (1, LAST_INSERT_ID(), 'Characterize and extract metadata'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET 
@NextMicroServiceChainLink = @MicroServiceChainLink; + +SET @microserviceGroup = 'Clean up names'; +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, '%SIPDirectory%SIPnameCleanup.log', 'sanitizeSIPName_v0.0', '"%relativeLocation%" "%SIPUUID%" "%date%" "%sharedPath%"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, LAST_INSERT_ID(), 'Sanitize Transfer name'); +SET @SanitizeTransferNameTask = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, 'objects/attachments', FALSE, '%SIPLogsDirectory%filenameCleanup.log', '%SIPLogsDirectory%filenameCleanup.log', 'sanitizeObjectNames_v0.0', '"%SIPObjectsDirectory%attachments/" "%SIPUUID%" "%date%" "%taskUUID%" "transferDirectory" "transferUUID" "%SIPDirectory%"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, LAST_INSERT_ID(), 'Sanitize object\'s file and directory names'); +SET @SanitizeTransferObjectsTask = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, 
nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +SET @microserviceGroup = 'Scan for viruses'; +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (1, @ScanforvirusesTask, 'Scan for viruses'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +SET @microserviceGroup = 'Extract packages'; +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, 'objects/attachments', FALSE, '%SIPLogsDirectory%extraction.log', '%SIPLogsDirectory%extraction.log', 'transcoderExtractPackages_v0.0', '"%relativeLocation%" "%SIPDirectory%" "%SIPUUID%" "%date%" "%taskUUID%" "%fileUUID%"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (1, LAST_INSERT_ID(), 'Extract packages'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, 'objects', FALSE, 
NULL, NULL, 'extractMaildirAttachments_v0.0', '"%SIPDirectory%" "%SIPUUID%" "%date%"'); +SET @ExtractpackagesTask = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @ExtractpackagesTask, 'Extract attachments'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + + +SET @microserviceGroup = 'Generate METS.xml document'; +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'createMETS_v0.0', '--sipUUID "%SIPUUID%" --basePath "%SIPDirectory%" --xmlFile "%SIPLogsDirectory%"METS.xml --basePathString "transferDirectory" --fileGroupIdentifier "transferUUID"'); +SET @GenerateMETSxmldocument = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @GenerateMETSxmldocument, 'Generate METS.xml document'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; +SET @bagitJoinStandardProcessingLink = @MicroServiceChainLink; + +SET @microserviceGroup = 'Verify transfer checksums'; +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, 
filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'verifyMD5_v0.0', '"%relativeLocation%" "%checksumsNoExtention%" "%date%" "%taskUUID%" "%SIPUUID%"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, LAST_INSERT_ID(), 'Verify metadata directory checksums'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +SET @microserviceGroup = 'Assign file UUIDs and checksums'; +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, 'objects', FALSE, NULL, NULL, 'updateSizeAndChecksum_v0.0', '--filePath "%relativeLocation%" --fileUUID "%fileUUID%" --eventIdentifierUUID "%taskUUID%" --date "%date%"'); +SET @AssignchecksumsandfilesizestoobjectsTask = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (1, @AssignchecksumsandfilesizestoobjectsTask, 'Assign checksums and file sizes to objects'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + + +INSERT INTO StandardTasksConfigs (filterFileEnd, 
filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, 'objects', TRUE, '%SIPLogsDirectory%FileUUIDs.log', '%SIPLogsDirectory%FileUUIDsError.log', 'assignFileUUIDs_v0.0', '--transferUUID "%SIPUUID%" --sipDirectory "%SIPDirectory%" --filePath "%relativeLocation%" --fileUUID "%fileUUID%" --eventIdentifierUUID "%taskUUID%" --date "%date%"'); +SET @AssignfileUUIDstoobjects = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (1, @AssignfileUUIDstoobjects, 'Assign file UUIDs to objects'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, @setFilePermissionsTaskConfig, @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +SET @microserviceGroup = 'Include default Transfer processingMCP.xml'; +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'copy_v0.0', '"%sharedPath%sharedMicroServiceTasksConfigs/processingMCPConfigs/defaultProcessingMCP.xml" "%SIPDirectory%processingMCP.xml" -n'); +SET @IncludedefaultTransferprocessingMCPxmlTask = 
LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @IncludedefaultTransferprocessingMCPxmlTask, 'Include default Transfer processingMCP.xml'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +-- Rename with transfer UUID -- +SET @microserviceGroup = 'Rename with transfer UUID'; +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'moveTransfer_v0.0', '"%SIPDirectory%" "%processingDirectory%%SIPName%-%SIPUUID%" "%SIPUUID%" "%sharedPath%"'); +SET @RenamewithtransferUUIDTask = LAST_INSERT_ID(); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, @RenamewithtransferUUIDTask, 'Rename with transfer UUID'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; +SET @PassedComplianceMicroServiceChainLink = @MicroServiceChainLink; + + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 
'restructureForComplianceMaildir_v0.0', '"%SIPDirectory%"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, LAST_INSERT_ID(), 'Attempt restructure for compliance'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + + +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, 'archivematicaSetTransferType_v0.0', '"%SIPUUID%" "Maildir"'); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (0, LAST_INSERT_ID(), 'Set transfer type: Maildir'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), @NextMicroServiceChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + +-- move to processing directory -- +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, @moveToProcessingDirectoryTaskConfig, @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = 
@MicroServiceChainLink; + +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, @setFilePermissionsTaskConfig, @defaultNextChainLink); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainLinksExitCodes (microServiceChainLink, exitCode, nextMicroServiceChainLink) + VALUES (@MicroServiceChainLink, 0, @NextMicroServiceChainLink); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'Approve transfer'); +SET @MicroServiceChain = LAST_INSERT_ID(); + + +SET @microserviceGroup = 'Approve transfer'; +INSERT INTO StandardTasksConfigs (filterFileEnd, filterFileStart, filterSubDir, requiresOutputLock, standardOutputFile, standardErrorFile, execute, arguments) + VALUES + (NULL, NULL, NULL, FALSE, NULL, NULL, '', ''); +INSERT INTO TasksConfigs (taskType, taskTypePKReference, description) + VALUES + (2, LAST_INSERT_ID(), 'Approve transfer'); +INSERT INTO MicroServiceChainLinks (microserviceGroup, currentTask, defaultNextChainLink) + VALUES (@microserviceGroup, LAST_INSERT_ID(), NULL); +SET @MicroServiceChainLink = LAST_INSERT_ID(); +INSERT INTO MicroServiceChainChoice (choiceAvailableAtLink, chainAvailable) + VALUES + (@MicroServiceChainLink, @MicroServiceChain); +INSERT INTO MicroServiceChainChoice (choiceAvailableAtLink, chainAvailable) + VALUES + (@MicroServiceChainLink, @rejectSIPMicroServiceChain); +SET @NextMicroServiceChainLink = @MicroServiceChainLink; + + +INSERT INTO MicroServiceChains (startingLink, description) VALUES (@MicroServiceChainLink, 'Transfers In progress'); +SET @MicroServiceChain = LAST_INSERT_ID(); + +INSERT INTO WatchedDirectories (watchedDirectoryPath, chain, expectedType) + VALUES ('%watchDirectoryPath%activeTransfers/maildir', @MicroServiceChain, @expectedTypeTransfer); +-- /MAILDIR transfer + + + +-- VIEWS -- +CREATE OR REPLACE VIEW taskDurationsView AS + 
SELECT + taskUUID, + UNIX_TIMESTAMP(endTime) - UNIX_TIMESTAMP(startTime) AS 'Time_spent_processing_in_seconds', + SEC_TO_TIME(UNIX_TIMESTAMP(endTime) - UNIX_TIMESTAMP(startTime)) AS 'Time_spent_processing' + FROM + Tasks + ORDER BY + UNIX_TIMESTAMP(endTime) - UNIX_TIMESTAMP(startTime) DESC +; + +CREATE OR REPLACE VIEW jobDurationsView AS + SELECT + Jobs.jobUUID, + sum(taskDurationsView.Time_spent_processing_in_seconds) AS 'Time_spent_processing_in_seconds', + SEC_TO_TIME(sum(taskDurationsView.Time_spent_processing_in_seconds)) AS 'Time_spent_processing', + Jobs.createdTime, + Jobs.createdTimeDec, + min(Tasks.startTime) AS 'startTime', + max(Tasks.endTime) AS 'endTime', + UNIX_TIMESTAMP(max(Tasks.endTime)) - UNIX_TIMESTAMP(min(Tasks.startTime)) AS 'time_from_start_of_processing_till_end_of_processing_in_seconds', + UNIX_TIMESTAMP(min(Tasks.startTime)) - UNIX_TIMESTAMP(Jobs.createdTime) AS 'time_from_job_created_till_start_of_processing_in_seconds', + UNIX_TIMESTAMP(max(Tasks.endTime)) - UNIX_TIMESTAMP(Jobs.createdTime) AS 'time_from_job_created_till_end_of_processing_in_seconds', + Jobs.jobType, + Jobs.directory, + Jobs.SIPUUID, + Jobs.unitType, + Jobs.currentStep, + Jobs.microserviceGroup, + Jobs.MicroServiceChainLinksPK + FROM + Jobs + Join Tasks ON Tasks.jobUUID = Jobs.jobUUID + Join taskDurationsView ON Tasks.taskUUID = taskDurationsView.taskUUID + GROUP BY + Jobs.jobUUID + ORDER BY + sum(taskDurationsView.Time_spent_processing_in_seconds) DESC +; + +-- SELECT count(taskUUID) from lastJobsTasks; +CREATE OR REPLACE VIEW transfersAndSIPs AS + SELECT + sipUUID AS 'unitUUID', + 'SIP' AS 'unitType', + currentPath AS 'currentLocation' + FROM SIPs + UNION ALL + SELECT + transferUUID as 'unitUUID', + 'Transfer' AS 'unitType', + currentLocation AS 'currentLocation' + FROM Transfers +; + + +CREATE OR REPLACE VIEW FilesByUnit AS + SELECT fileUUID , originalLocation , currentLocation , sipUUID AS 'unitUUID', 'SIP' AS 'unitType', removedTime , enteredSystem , fileSize, 
checksum , fileGrpUse, fileGrpUUID, label FROM Files + UNION ALL + SELECT fileUUID , originalLocation , currentLocation , transferUUID AS 'unitUUID', 'Transfer' AS 'unitType', removedTime , enteredSystem , fileSize, checksum , fileGrpUse, fileGrpUUID, label FROM Files +; + +CREATE OR REPLACE VIEW lastJobsTasks AS SELECT * FROM Tasks WHERE jobUUID = (SELECT jobUUID FROM Jobs ORDER BY createdTime DESC, createdTimeDec DESC LIMIT 1); + +CREATE OR REPLACE VIEW lastJobsInfo AS SELECT count(taskUUID) AS 'Completed Tasks', min(startTime), max(endTime), TIMEDIFF(UTC_TIMESTAMP(), min(startTime)) AS 'Job duration', TIMEDIFF(UTC_TIMESTAMP(), max(endTime)) AS 'Time Since last return', TIME(TIMEDIFF(UTC_TIMESTAMP(), min(startTime))/count(taskUUID)) AS 'AVG proc time' FROM lastJobsTasks where startTime != 0; + +CREATE OR REPLACE VIEW processingDurationInformation2 AS + SELECT + Jobs.sipUUID AS 'sipUUID', + SEC_TO_TIME(max(UNIX_TIMESTAMP(d.endTime)) - min(UNIX_TIMESTAMP(d.startTime))) AS 'Time_spent_in_system', + min(UNIX_TIMESTAMP(d.startTime)) AS 'startedTime' + FROM + Tasks AS d + JOIN Jobs on d.jobUUID = Jobs.jobUUID + WHERE + endtime != 0 + AND startTime != 0 + GROUP BY + Jobs.sipUUID +; + +CREATE OR REPLACE VIEW processingDurationInformationByClient AS +SELECT + Jobs.sipUUID AS 'SIP_OR_TRANSFER_UUID', + client, + Durations.Time_spent_in_system, + SEC_TO_TIME(sum((UNIX_TIMESTAMP(endTime) - UNIX_TIMESTAMP(startTime)))) AS 'Time_spent_processing', + count(Tasks.taskUUID) AS 'Number_of_tasks', + Transfers.currentLocation, + SIPs.currentPath +From + Tasks + JOIN Jobs on Tasks.jobUUID = Jobs.jobUUID + LEFT OUTER JOIN processingDurationInformation2 AS Durations ON Jobs.sipUUID = Durations.sipUUID + LEFT OUTER JOIN Transfers ON Jobs.sipUUID = Transfers.transferUUID + LEFT OUTER JOIN SIPs ON Jobs.sipUUID = SIPs.sipUUID +WHERE + endtime != 0 + AND startTime != 0 +GROUP BY + client, + Jobs.sipUUID +ORDER BY + Durations.startedTime, + Jobs.sipUUID +; + +CREATE OR REPLACE VIEW 
processingDurationInformation AS +SELECT + Jobs.sipUUID AS 'SIP_OR_TRANSFER_UUID', + client, + Durations.Time_spent_in_system, + SEC_TO_TIME(sum((UNIX_TIMESTAMP(endTime) - UNIX_TIMESTAMP(startTime)))) AS 'Time_spent_processing', + count(Tasks.taskUUID) AS 'Number_of_tasks', + Transfers.currentLocation, + SIPs.currentPath +From + Tasks + JOIN Jobs on Tasks.jobUUID = Jobs.jobUUID + LEFT OUTER JOIN processingDurationInformation2 AS Durations ON Jobs.sipUUID = Durations.sipUUID + LEFT OUTER JOIN Transfers ON Jobs.sipUUID = Transfers.transferUUID + LEFT OUTER JOIN SIPs ON Jobs.sipUUID = SIPs.sipUUID +WHERE + endtime != 0 + AND startTime != 0 +GROUP BY + Jobs.sipUUID +ORDER BY + Durations.startedTime, + Jobs.sipUUID +; + + + +CREATE OR REPLACE VIEW PDI_by_unit AS + SELECT + SIP_OR_TRANSFER_UUID, + FilesByUnit.unitType, + SEC_TO_TIME(sum(TIME_TO_SEC(Time_spent_processing))) AS 'Total time processing', + Number_of_tasks, + SEC_TO_TIME(sum(TIME_TO_SEC(Time_spent_processing))/Number_of_tasks) AS 'Average time per task', + sum( FilesByUnit.fileSize) AS 'total file size', + count( FilesByUnit.fileUUID) AS 'number of files', + count( DISTINCT FilesByUnit.fileUUID), + sum( FilesByUnit.fileSize)/count( FilesByUnit.fileUUID)/1000 AS 'average file size KB', + sum( FilesByUnit.fileSize)/count( FilesByUnit.fileUUID)/1000000 AS 'average file size MB', + SEC_TO_TIME(TIME_TO_SEC(SEC_TO_TIME(sum(TIME_TO_SEC(Time_spent_processing))/Number_of_tasks))/ (sum( FilesByUnit.fileSize)/count( FilesByUnit.fileUUID)/1000000)) AS 'time per task per MB', + processingDurationInformation.currentLocation, + processingDurationInformation.currentPath + FROM + processingDurationInformation + JOIN FilesByUnit ON + processingDurationInformation.SIP_OR_TRANSFER_UUID = FilesByUnit.unitUUID + GROUP BY + SIP_OR_TRANSFER_UUID +; + +CREATE OR REPLACE VIEW FileExtensions AS + SELECT FileUUID, SUBSTRING_INDEX(SUBSTRING_INDEX(currentLocation,'/',-1),'.',-1) AS extension FROM Files WHERE removedTime = 0 +; + + +CREATE 
OR REPLACE VIEW developmentAide_choicesDisplayed AS + SELECT MicroServiceChainChoice.*, TasksConfigs.description AS 'Text', MicroServiceChains.description AS 'Choice' + FROM MicroServiceChainChoice + JOIN MicroServiceChainLinks ON MicroServiceChainChoice.choiceAvailableAtLink = MicroServiceChainLinks.pk + JOIN TasksConfigs ON MicroServiceChainLinks.currentTask = TasksConfigs.pk + JOIN MicroServiceChains ON MicroServiceChainChoice.chainAvailable = MicroServiceChains.pk +; + + +-- /VIEWS -- + +-- Dashboard schema -- + +BEGIN; + +SET FOREIGN_KEY_CHECKS = 0; + +DROP TABLE IF EXISTS `auth_message`; +DROP TABLE IF EXISTS `auth_user`; +DROP TABLE IF EXISTS `auth_user_groups`; +DROP TABLE IF EXISTS `auth_user_user_permissions`; +DROP TABLE IF EXISTS `auth_group`; +DROP TABLE IF EXISTS `auth_group_permissions`; +DROP TABLE IF EXISTS `auth_permission`; +CREATE TABLE `auth_permission` ( + `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY, + `name` varchar(50) NOT NULL, + `content_type_id` integer NOT NULL, + `codename` varchar(100) NOT NULL, + UNIQUE (`content_type_id`, `codename`) +) +; +ALTER TABLE `auth_permission` ADD CONSTRAINT `content_type_id_refs_id_728de91f` FOREIGN KEY (`content_type_id`) REFERENCES `django_content_type` (`id`); +CREATE TABLE `auth_group_permissions` ( + `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY, + `group_id` integer NOT NULL, + `permission_id` integer NOT NULL, + UNIQUE (`group_id`, `permission_id`) +) +; +ALTER TABLE `auth_group_permissions` ADD CONSTRAINT `permission_id_refs_id_a7792de1` FOREIGN KEY (`permission_id`) REFERENCES `auth_permission` (`id`); +CREATE TABLE `auth_group` ( + `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY, + `name` varchar(80) NOT NULL UNIQUE +) +; +ALTER TABLE `auth_group_permissions` ADD CONSTRAINT `group_id_refs_id_3cea63fe` FOREIGN KEY (`group_id`) REFERENCES `auth_group` (`id`); +CREATE TABLE `auth_user_user_permissions` ( + `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY, + `user_id` integer NOT NULL, + 
`permission_id` integer NOT NULL, + UNIQUE (`user_id`, `permission_id`) +) +; +ALTER TABLE `auth_user_user_permissions` ADD CONSTRAINT `permission_id_refs_id_67e79cb` FOREIGN KEY (`permission_id`) REFERENCES `auth_permission` (`id`); +CREATE TABLE `auth_user_groups` ( + `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY, + `user_id` integer NOT NULL, + `group_id` integer NOT NULL, + UNIQUE (`user_id`, `group_id`) +) +; +ALTER TABLE `auth_user_groups` ADD CONSTRAINT `group_id_refs_id_f0ee9890` FOREIGN KEY (`group_id`) REFERENCES `auth_group` (`id`); +CREATE TABLE `auth_user` ( + `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY, + `username` varchar(30) NOT NULL UNIQUE, + `first_name` varchar(30) NOT NULL, + `last_name` varchar(30) NOT NULL, + `email` varchar(75) NOT NULL, + `password` varchar(128) NOT NULL, + `is_staff` bool NOT NULL, + `is_active` bool NOT NULL, + `is_superuser` bool NOT NULL, + `last_login` datetime NOT NULL, + `date_joined` datetime NOT NULL +) +; +ALTER TABLE `auth_user_user_permissions` ADD CONSTRAINT `user_id_refs_id_f2045483` FOREIGN KEY (`user_id`) REFERENCES `auth_user` (`id`); +ALTER TABLE `auth_user_groups` ADD CONSTRAINT `user_id_refs_id_831107f1` FOREIGN KEY (`user_id`) REFERENCES `auth_user` (`id`); +CREATE TABLE `auth_message` ( + `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY, + `user_id` integer NOT NULL, + `message` longtext NOT NULL +) +; +ALTER TABLE `auth_message` ADD CONSTRAINT `user_id_refs_id_9af0b65a` FOREIGN KEY (`user_id`) REFERENCES `auth_user` (`id`); +CREATE INDEX `auth_permission_e4470c6e` ON `auth_permission` (`content_type_id`); +CREATE INDEX `auth_message_fbfc09f1` ON `auth_message` (`user_id`); +DROP TABLE IF EXISTS `django_content_type`; +CREATE TABLE `django_content_type` ( + `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY, + `name` varchar(100) NOT NULL, + `app_label` varchar(100) NOT NULL, + `model` varchar(100) NOT NULL, + UNIQUE (`app_label`, `model`) +) +; +CREATE TABLE `django_session` ( + `session_key` 
varchar(40) NOT NULL PRIMARY KEY, + `session_data` longtext NOT NULL, + `expire_date` datetime NOT NULL +) +; +CREATE INDEX `django_session_c25c2c28` ON `django_session` (`expire_date`); + +SET FOREIGN_KEY_CHECKS = 1; + +COMMIT; + +-- END Dashboard schema -- diff --git a/src/MCPServer/share/preMCPLogging.sh b/src/MCPServer/share/preMCPLogging.sh new file mode 100755 index 0000000000..da286d0739 --- /dev/null +++ b/src/MCPServer/share/preMCPLogging.sh @@ -0,0 +1,32 @@ +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. 
+ +# @package Archivematica +# @subpackage MCPServer +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ + +databaseName="MCP" +username="demo" +password="demo" + +mysql -hlocalhost -uroot --execute="DROP DATABASE IF EXISTS ${databaseName}" +mysql -hlocalhost -uroot --execute="CREATE DATABASE ${databaseName} CHARACTER SET utf8 COLLATE utf8_unicode_ci" +mysql -hlocalhost -uroot --execute="source ./mysql" "$databaseName" +mysql -hlocalhost -uroot --execute="DROP USER '${username}'@'localhost'" +mysql -hlocalhost -uroot --execute="CREATE USER '${username}'@'localhost' IDENTIFIED BY '${password}'" +mysql -hlocalhost -uroot --execute="GRANT SELECT, UPDATE, INSERT, DELETE ON ${databaseName}.* TO '${username}'@'localhost'" diff --git a/src/MCPServer/sharedDirectoryStructure/DIPbackups/.gitignore b/src/MCPServer/sharedDirectoryStructure/DIPbackups/.gitignore new file mode 100644 index 0000000000..cead4f2b3b --- /dev/null +++ b/src/MCPServer/sharedDirectoryStructure/DIPbackups/.gitignore @@ -0,0 +1,4 @@ +# Ignore everything in this directory +* +# Except this file +\!.gitignore diff --git a/src/MCPServer/sharedDirectoryStructure/SIPbackups/.gitignore b/src/MCPServer/sharedDirectoryStructure/SIPbackups/.gitignore new file mode 100644 index 0000000000..cead4f2b3b --- /dev/null +++ b/src/MCPServer/sharedDirectoryStructure/SIPbackups/.gitignore @@ -0,0 +1,4 @@ +# Ignore everything in this directory +* +# Except this file +\!.gitignore diff --git a/src/MCPServer/sharedDirectoryStructure/archivematicaConfigs/README b/src/MCPServer/sharedDirectoryStructure/archivematicaConfigs/README new file mode 100755 index 0000000000..42c3376ccf --- /dev/null +++ b/src/MCPServer/sharedDirectoryStructure/archivematicaConfigs/README @@ -0,0 +1,3 @@ +#This folder should be a link to /etc/archivematica/ on the server machine +#please run: +ln /etc/archivematica/ ./ diff --git a/src/MCPServer/sharedDirectoryStructure/arrange/.gitignore 
b/src/MCPServer/sharedDirectoryStructure/arrange/.gitignore new file mode 100644 index 0000000000..cead4f2b3b --- /dev/null +++ b/src/MCPServer/sharedDirectoryStructure/arrange/.gitignore @@ -0,0 +1,4 @@ +# Ignore everything in this directory +* +# Except this file +\!.gitignore diff --git a/src/MCPServer/sharedDirectoryStructure/currentlyProcessing/.gitignore b/src/MCPServer/sharedDirectoryStructure/currentlyProcessing/.gitignore new file mode 100644 index 0000000000..cead4f2b3b --- /dev/null +++ b/src/MCPServer/sharedDirectoryStructure/currentlyProcessing/.gitignore @@ -0,0 +1,4 @@ +# Ignore everything in this directory +* +# Except this file +\!.gitignore diff --git a/src/MCPServer/sharedDirectoryStructure/failed/.gitignore b/src/MCPServer/sharedDirectoryStructure/failed/.gitignore new file mode 100644 index 0000000000..cead4f2b3b --- /dev/null +++ b/src/MCPServer/sharedDirectoryStructure/failed/.gitignore @@ -0,0 +1,4 @@ +# Ignore everything in this directory +* +# Except this file +\!.gitignore diff --git a/src/MCPServer/sharedDirectoryStructure/rejected/.gitignore b/src/MCPServer/sharedDirectoryStructure/rejected/.gitignore new file mode 100644 index 0000000000..cead4f2b3b --- /dev/null +++ b/src/MCPServer/sharedDirectoryStructure/rejected/.gitignore @@ -0,0 +1,4 @@ +# Ignore everything in this directory +* +# Except this file +\!.gitignore diff --git a/src/MCPServer/sharedDirectoryStructure/sharedMicroServiceTasksConfigs/createXmlEventsAssist/organization.ini b/src/MCPServer/sharedDirectoryStructure/sharedMicroServiceTasksConfigs/createXmlEventsAssist/organization.ini new file mode 100644 index 0000000000..e49f15392b --- /dev/null +++ b/src/MCPServer/sharedDirectoryStructure/sharedMicroServiceTasksConfigs/createXmlEventsAssist/organization.ini @@ -0,0 +1,5 @@ +[organization] +yourAgentIdentifierType= repository code +yourAgentIdentifierValue= ORG +yourAgentName= Your Organization Name Here +yourAgentType= organization diff --git 
a/src/MCPServer/sharedDirectoryStructure/sharedMicroServiceTasksConfigs/generateAIP/bagit/bag-info.txt b/src/MCPServer/sharedDirectoryStructure/sharedMicroServiceTasksConfigs/generateAIP/bagit/bag-info.txt new file mode 100644 index 0000000000..3f5f5613ed --- /dev/null +++ b/src/MCPServer/sharedDirectoryStructure/sharedMicroServiceTasksConfigs/generateAIP/bagit/bag-info.txt @@ -0,0 +1,5 @@ +External-Description: A brief explanation of the contents and provenance. +External-Identifier: A sender-supplied identifier for the bag. +Bag-Group-Identifier: A sender-supplied identifier for the set, if any, of bags to which it logically belongs. This identifier must be unique across the sender's content, and if recognizable as belonging to a globally unique scheme, the receiver should make an effort to honor reference to it. +Internal-Sender-Identifier: An alternate sender-specific identifier for the content and/or bag. +Internal-Sender-Description: A sender-local prose description of the contents of the bag. 
\ No newline at end of file diff --git a/src/MCPServer/sharedDirectoryStructure/sharedMicroServiceTasksConfigs/processingMCPConfigs/defaultProcessingMCP.xml b/src/MCPServer/sharedDirectoryStructure/sharedMicroServiceTasksConfigs/processingMCPConfigs/defaultProcessingMCP.xml new file mode 100644 index 0000000000..6ddacfd461 --- /dev/null +++ b/src/MCPServer/sharedDirectoryStructure/sharedMicroServiceTasksConfigs/processingMCPConfigs/defaultProcessingMCP.xml @@ -0,0 +1,25 @@ +<processingMCP> + <preconfiguredChoices> + <preconfiguredChoice> + <appliesTo>Workflow decision - create transfer backup</appliesTo> + <goToChain>Do not backup transfer</goToChain> + </preconfiguredChoice> + <preconfiguredChoice> + <appliesTo>Workflow decision - send transfer to quarantine</appliesTo> + <goToChain>Skip quarantine</goToChain> + </preconfiguredChoice> + <preconfiguredChoice> + <appliesTo>Remove from quarantine</appliesTo> + <goToChain>Unquarantine</goToChain> + <delay unitCtime="yes">50</delay> + </preconfiguredChoice> + <preconfiguredChoice> + <appliesTo>Select compression algorithm</appliesTo> + <goToChain>lzma</goToChain> + </preconfiguredChoice> + <preconfiguredChoice> + <appliesTo>Select compression level</appliesTo> + <goToChain>5 - normal compression mode</goToChain> + </preconfiguredChoice> + </preconfiguredChoices> +</processingMCP> \ No newline at end of file diff --git a/src/MCPServer/sharedDirectoryStructure/sharedMicroServiceTasksConfigs/processingMCPConfigs/sampleProcessingMCPs/NoApprovals_processingMCP.xml b/src/MCPServer/sharedDirectoryStructure/sharedMicroServiceTasksConfigs/processingMCPConfigs/sampleProcessingMCPs/NoApprovals_processingMCP.xml new file mode 100755 index 0000000000..e36e04c7e7 --- /dev/null +++ b/src/MCPServer/sharedDirectoryStructure/sharedMicroServiceTasksConfigs/processingMCPConfigs/sampleProcessingMCPs/NoApprovals_processingMCP.xml @@ -0,0 +1,33 @@ +<processingMCP> + <preconfiguredChoices> + <preconfiguredChoice> + <appliesTo>Workflow 
decision - send transfer to quarantine</appliesTo> + <goToChain>Skip quarantine</goToChain> + </preconfiguredChoice> + <preconfiguredChoice> + <appliesTo>Create SIP from transfer objects</appliesTo> + <goToChain>Create SIP from transfer objects</goToChain> + </preconfiguredChoice> + <preconfiguredChoice> + <appliesTo>Normalize</appliesTo> + <goToChain>Preservation and access</goToChain> + </preconfiguredChoice> + <preconfiguredChoice> + <appliesTo>Approve normalization</appliesTo> + <goToChain>Process submission documentation and prepare DIP and AIP.</goToChain> + </preconfiguredChoice> + <preconfiguredChoice> + <appliesTo>uploadDIP</appliesTo> + <goToChain>Upload DIP</goToChain> + </preconfiguredChoice> + <preconfiguredChoice> + <appliesTo>Store AIP</appliesTo> + <goToChain>Store AIP</goToChain> + </preconfiguredChoice> + <preconfiguredChoice> + <appliesTo>Remove from quarantine</appliesTo> + <goToChain>Unquarantine</goToChain> + <delay unitCtime="yes">50</delay> + </preconfiguredChoice> + </preconfiguredChoices> +</processingMCP> diff --git a/src/MCPServer/sharedDirectoryStructure/sharedMicroServiceTasksConfigs/processingMCPConfigs/sampleProcessingMCPs/SIP_AIPAndDIP_processingMCP.xml b/src/MCPServer/sharedDirectoryStructure/sharedMicroServiceTasksConfigs/processingMCPConfigs/sampleProcessingMCPs/SIP_AIPAndDIP_processingMCP.xml new file mode 100755 index 0000000000..6a99c35bed --- /dev/null +++ b/src/MCPServer/sharedDirectoryStructure/sharedMicroServiceTasksConfigs/processingMCPConfigs/sampleProcessingMCPs/SIP_AIPAndDIP_processingMCP.xml @@ -0,0 +1,8 @@ +<processingMCP> + <preconfiguredChoices> + <preconfiguredChoice> + <appliesTo>Workflow decision - Create DIP?</appliesTo> + <goToChain>Create DIP and AIP</goToChain> + </preconfiguredChoice> + </preconfiguredChoices> +</processingMCP> diff --git a/src/MCPServer/sharedDirectoryStructure/sharedMicroServiceTasksConfigs/processingMCPConfigs/sampleProcessingMCPs/SIP_NoDIP(onlyAIP)_processingMCP.xml 
b/src/MCPServer/sharedDirectoryStructure/sharedMicroServiceTasksConfigs/processingMCPConfigs/sampleProcessingMCPs/SIP_NoDIP(onlyAIP)_processingMCP.xml new file mode 100755 index 0000000000..0220ec6f53 --- /dev/null +++ b/src/MCPServer/sharedDirectoryStructure/sharedMicroServiceTasksConfigs/processingMCPConfigs/sampleProcessingMCPs/SIP_NoDIP(onlyAIP)_processingMCP.xml @@ -0,0 +1,8 @@ +<processingMCP> + <preconfiguredChoices> + <preconfiguredChoice> + <appliesTo>Workflow decision - Create DIP?</appliesTo> + <goToChain>Create AIP</goToChain> + </preconfiguredChoice> + </preconfiguredChoices> +</processingMCP> diff --git a/src/MCPServer/sharedDirectoryStructure/sharedMicroServiceTasksConfigs/processingMCPConfigs/sampleProcessingMCPs/Transfer_NoQuarantine_processingMCP.xml b/src/MCPServer/sharedDirectoryStructure/sharedMicroServiceTasksConfigs/processingMCPConfigs/sampleProcessingMCPs/Transfer_NoQuarantine_processingMCP.xml new file mode 100755 index 0000000000..d08f7d9fbb --- /dev/null +++ b/src/MCPServer/sharedDirectoryStructure/sharedMicroServiceTasksConfigs/processingMCPConfigs/sampleProcessingMCPs/Transfer_NoQuarantine_processingMCP.xml @@ -0,0 +1,8 @@ +<processingMCP> + <preconfiguredChoices> + <preconfiguredChoice> + <appliesTo>Workflow decision - send transfer to quarantine</appliesTo> + <goToChain>Skip quarantine</goToChain> + </preconfiguredChoice> + </preconfiguredChoices> +</processingMCP> diff --git a/src/MCPServer/sharedDirectoryStructure/sharedMicroServiceTasksConfigs/processingMCPConfigs/sampleProcessingMCPs/Transfer_Quarantine_processingMCP.xml b/src/MCPServer/sharedDirectoryStructure/sharedMicroServiceTasksConfigs/processingMCPConfigs/sampleProcessingMCPs/Transfer_Quarantine_processingMCP.xml new file mode 100755 index 0000000000..5dfd4ce953 --- /dev/null +++ b/src/MCPServer/sharedDirectoryStructure/sharedMicroServiceTasksConfigs/processingMCPConfigs/sampleProcessingMCPs/Transfer_Quarantine_processingMCP.xml @@ -0,0 +1,8 @@ +<processingMCP> + 
<preconfiguredChoices> + <preconfiguredChoice> + <appliesTo>Workflow decision - send transfer to quarantine</appliesTo> + <goToChain>Quarantine</goToChain> + </preconfiguredChoice> + </preconfiguredChoices> +</processingMCP> diff --git a/src/MCPServer/sharedDirectoryStructure/sharedMicroServiceTasksConfigs/processingMCPConfigs/sampleProcessingMCPs/defaultSIPProcessingMCP-CVA.xml b/src/MCPServer/sharedDirectoryStructure/sharedMicroServiceTasksConfigs/processingMCPConfigs/sampleProcessingMCPs/defaultSIPProcessingMCP-CVA.xml new file mode 100644 index 0000000000..5f6c6a200d --- /dev/null +++ b/src/MCPServer/sharedDirectoryStructure/sharedMicroServiceTasksConfigs/processingMCPConfigs/sampleProcessingMCPs/defaultSIPProcessingMCP-CVA.xml @@ -0,0 +1,12 @@ +<processingMCP> + <preconfiguredChoices> + <preconfiguredChoice> + <appliesTo>Normalize</appliesTo> + <goToChain>Access only</goToChain> + </preconfiguredChoice> + <preconfiguredChoice> + <appliesTo>Approve normalization</appliesTo> + <goToChain>Approve normalization</goToChain> + </preconfiguredChoice> + </preconfiguredChoices> +</processingMCP> diff --git a/src/MCPServer/sharedDirectoryStructure/sharedMicroServiceTasksConfigs/processingMCPConfigs/sampleProcessingMCPs/defaultTransferProcessingMCP-CVA.xml b/src/MCPServer/sharedDirectoryStructure/sharedMicroServiceTasksConfigs/processingMCPConfigs/sampleProcessingMCPs/defaultTransferProcessingMCP-CVA.xml new file mode 100755 index 0000000000..631ffa4e85 --- /dev/null +++ b/src/MCPServer/sharedDirectoryStructure/sharedMicroServiceTasksConfigs/processingMCPConfigs/sampleProcessingMCPs/defaultTransferProcessingMCP-CVA.xml @@ -0,0 +1,21 @@ +<processingMCP> + <preconfiguredChoices> + <preconfiguredChoice> + <appliesTo>Workflow decision - create transfer backup</appliesTo> + <goToChain>Do not create transfer backup</goToChain> + </preconfiguredChoice> + <preconfiguredChoice> + <appliesTo>Workflow decision - send transfer to quarantine</appliesTo> + <goToChain>Skip 
quarantine</goToChain> + </preconfiguredChoice> + <preconfiguredChoice> + <appliesTo>Remove from quarantine</appliesTo> + <goToChain>Unquarantine</goToChain> + <delay unitCtime="yes">50</delay> + </preconfiguredChoice> + <preconfiguredChoice> + <appliesTo>Create SIP(s)</appliesTo> + <goToChain>Create single SIP and continue processing</goToChain> + </preconfiguredChoice> + </preconfiguredChoices> +</processingMCP> diff --git a/src/MCPServer/sharedDirectoryStructure/sharedMicroServiceTasksConfigs/transcoder/defaultIcons/default.jpg b/src/MCPServer/sharedDirectoryStructure/sharedMicroServiceTasksConfigs/transcoder/defaultIcons/default.jpg new file mode 100644 index 0000000000..232222cd0b Binary files /dev/null and b/src/MCPServer/sharedDirectoryStructure/sharedMicroServiceTasksConfigs/transcoder/defaultIcons/default.jpg differ diff --git a/src/MCPServer/sharedDirectoryStructure/sharedMicroServiceTasksConfigs/transcoder/sources b/src/MCPServer/sharedDirectoryStructure/sharedMicroServiceTasksConfigs/transcoder/sources new file mode 100644 index 0000000000..4533c39d39 --- /dev/null +++ b/src/MCPServer/sharedDirectoryStructure/sharedMicroServiceTasksConfigs/transcoder/sources @@ -0,0 +1,2 @@ +default.jpg is a jpg conversion of the GPL ubuntu precise file: /usr/share/icons/LowContrast/48x48/stock/document-new.png + diff --git a/src/MCPServer/sharedDirectoryStructure/transferBacklog/arrange/.gitignore b/src/MCPServer/sharedDirectoryStructure/transferBacklog/arrange/.gitignore new file mode 100644 index 0000000000..cead4f2b3b --- /dev/null +++ b/src/MCPServer/sharedDirectoryStructure/transferBacklog/arrange/.gitignore @@ -0,0 +1,4 @@ +# Ignore everything in this directory +* +# Except this file +\!.gitignore diff --git a/src/MCPServer/sharedDirectoryStructure/transferBacklog/original/.gitignore b/src/MCPServer/sharedDirectoryStructure/transferBacklog/original/.gitignore new file mode 100644 index 0000000000..cead4f2b3b --- /dev/null +++ 
b/src/MCPServer/sharedDirectoryStructure/transferBacklog/original/.gitignore @@ -0,0 +1,4 @@ +# Ignore everything in this directory +* +# Except this file +\!.gitignore diff --git a/src/MCPServer/sharedDirectoryStructure/transferBackups/arrange/.gitignore b/src/MCPServer/sharedDirectoryStructure/transferBackups/arrange/.gitignore new file mode 100644 index 0000000000..cead4f2b3b --- /dev/null +++ b/src/MCPServer/sharedDirectoryStructure/transferBackups/arrange/.gitignore @@ -0,0 +1,4 @@ +# Ignore everything in this directory +* +# Except this file +\!.gitignore diff --git a/src/MCPServer/sharedDirectoryStructure/transferBackups/originals/.gitignore b/src/MCPServer/sharedDirectoryStructure/transferBackups/originals/.gitignore new file mode 100644 index 0000000000..cead4f2b3b --- /dev/null +++ b/src/MCPServer/sharedDirectoryStructure/transferBackups/originals/.gitignore @@ -0,0 +1,4 @@ +# Ignore everything in this directory +* +# Except this file +\!.gitignore diff --git a/src/MCPServer/sharedDirectoryStructure/watchedDirectories/SIPCreation/SIPsUnderConstruction/.gitignore b/src/MCPServer/sharedDirectoryStructure/watchedDirectories/SIPCreation/SIPsUnderConstruction/.gitignore new file mode 100644 index 0000000000..cead4f2b3b --- /dev/null +++ b/src/MCPServer/sharedDirectoryStructure/watchedDirectories/SIPCreation/SIPsUnderConstruction/.gitignore @@ -0,0 +1,4 @@ +# Ignore everything in this directory +* +# Except this file +\!.gitignore diff --git a/src/MCPServer/sharedDirectoryStructure/watchedDirectories/SIPCreation/completedTransfers/.gitignore b/src/MCPServer/sharedDirectoryStructure/watchedDirectories/SIPCreation/completedTransfers/.gitignore new file mode 100644 index 0000000000..cead4f2b3b --- /dev/null +++ b/src/MCPServer/sharedDirectoryStructure/watchedDirectories/SIPCreation/completedTransfers/.gitignore @@ -0,0 +1,4 @@ +# Ignore everything in this directory +* +# Except this file +\!.gitignore diff --git 
a/src/MCPServer/sharedDirectoryStructure/watchedDirectories/activeTransfers/Dspace/.gitignore b/src/MCPServer/sharedDirectoryStructure/watchedDirectories/activeTransfers/Dspace/.gitignore new file mode 100644 index 0000000000..cead4f2b3b --- /dev/null +++ b/src/MCPServer/sharedDirectoryStructure/watchedDirectories/activeTransfers/Dspace/.gitignore @@ -0,0 +1,4 @@ +# Ignore everything in this directory +* +# Except this file +\!.gitignore diff --git a/src/MCPServer/sharedDirectoryStructure/watchedDirectories/activeTransfers/baggitDirectory/.gitignore b/src/MCPServer/sharedDirectoryStructure/watchedDirectories/activeTransfers/baggitDirectory/.gitignore new file mode 100644 index 0000000000..cead4f2b3b --- /dev/null +++ b/src/MCPServer/sharedDirectoryStructure/watchedDirectories/activeTransfers/baggitDirectory/.gitignore @@ -0,0 +1,4 @@ +# Ignore everything in this directory +* +# Except this file +\!.gitignore diff --git a/src/MCPServer/sharedDirectoryStructure/watchedDirectories/activeTransfers/baggitZippedDirectory/.gitignore b/src/MCPServer/sharedDirectoryStructure/watchedDirectories/activeTransfers/baggitZippedDirectory/.gitignore new file mode 100644 index 0000000000..cead4f2b3b --- /dev/null +++ b/src/MCPServer/sharedDirectoryStructure/watchedDirectories/activeTransfers/baggitZippedDirectory/.gitignore @@ -0,0 +1,4 @@ +# Ignore everything in this directory +* +# Except this file +\!.gitignore diff --git a/src/MCPServer/sharedDirectoryStructure/watchedDirectories/activeTransfers/maildir/.gitignore b/src/MCPServer/sharedDirectoryStructure/watchedDirectories/activeTransfers/maildir/.gitignore new file mode 100644 index 0000000000..cead4f2b3b --- /dev/null +++ b/src/MCPServer/sharedDirectoryStructure/watchedDirectories/activeTransfers/maildir/.gitignore @@ -0,0 +1,4 @@ +# Ignore everything in this directory +* +# Except this file +\!.gitignore diff --git a/src/MCPServer/sharedDirectoryStructure/watchedDirectories/activeTransfers/standardTransfer/.gitignore 
b/src/MCPServer/sharedDirectoryStructure/watchedDirectories/activeTransfers/standardTransfer/.gitignore new file mode 100644 index 0000000000..cead4f2b3b --- /dev/null +++ b/src/MCPServer/sharedDirectoryStructure/watchedDirectories/activeTransfers/standardTransfer/.gitignore @@ -0,0 +1,4 @@ +# Ignore everything in this directory +* +# Except this file +\!.gitignore diff --git a/src/MCPServer/sharedDirectoryStructure/watchedDirectories/approveNormalization/preservation/.gitignore b/src/MCPServer/sharedDirectoryStructure/watchedDirectories/approveNormalization/preservation/.gitignore new file mode 100644 index 0000000000..cead4f2b3b --- /dev/null +++ b/src/MCPServer/sharedDirectoryStructure/watchedDirectories/approveNormalization/preservation/.gitignore @@ -0,0 +1,4 @@ +# Ignore everything in this directory +* +# Except this file +\!.gitignore diff --git a/src/MCPServer/sharedDirectoryStructure/watchedDirectories/approveNormalization/preservationAndAccess/.gitignore b/src/MCPServer/sharedDirectoryStructure/watchedDirectories/approveNormalization/preservationAndAccess/.gitignore new file mode 100644 index 0000000000..cead4f2b3b --- /dev/null +++ b/src/MCPServer/sharedDirectoryStructure/watchedDirectories/approveNormalization/preservationAndAccess/.gitignore @@ -0,0 +1,4 @@ +# Ignore everything in this directory +* +# Except this file +\!.gitignore diff --git a/src/MCPServer/sharedDirectoryStructure/watchedDirectories/approveSubmissionDocumentationIngest/.gitignore b/src/MCPServer/sharedDirectoryStructure/watchedDirectories/approveSubmissionDocumentationIngest/.gitignore new file mode 100644 index 0000000000..cead4f2b3b --- /dev/null +++ b/src/MCPServer/sharedDirectoryStructure/watchedDirectories/approveSubmissionDocumentationIngest/.gitignore @@ -0,0 +1,4 @@ +# Ignore everything in this directory +* +# Except this file +\!.gitignore diff --git a/src/MCPServer/sharedDirectoryStructure/watchedDirectories/quarantined/.gitignore 
b/src/MCPServer/sharedDirectoryStructure/watchedDirectories/quarantined/.gitignore new file mode 100644 index 0000000000..cead4f2b3b --- /dev/null +++ b/src/MCPServer/sharedDirectoryStructure/watchedDirectories/quarantined/.gitignore @@ -0,0 +1,4 @@ +# Ignore everything in this directory +* +# Except this file +\!.gitignore diff --git a/src/MCPServer/sharedDirectoryStructure/watchedDirectories/storeAIP/.gitignore b/src/MCPServer/sharedDirectoryStructure/watchedDirectories/storeAIP/.gitignore new file mode 100644 index 0000000000..cead4f2b3b --- /dev/null +++ b/src/MCPServer/sharedDirectoryStructure/watchedDirectories/storeAIP/.gitignore @@ -0,0 +1,4 @@ +# Ignore everything in this directory +* +# Except this file +\!.gitignore diff --git a/src/MCPServer/sharedDirectoryStructure/watchedDirectories/system/autoProcessSIP/.gitignore b/src/MCPServer/sharedDirectoryStructure/watchedDirectories/system/autoProcessSIP/.gitignore new file mode 100644 index 0000000000..cead4f2b3b --- /dev/null +++ b/src/MCPServer/sharedDirectoryStructure/watchedDirectories/system/autoProcessSIP/.gitignore @@ -0,0 +1,4 @@ +# Ignore everything in this directory +* +# Except this file +\!.gitignore diff --git a/src/MCPServer/sharedDirectoryStructure/watchedDirectories/system/autoRestructureForCompliance/.gitignore b/src/MCPServer/sharedDirectoryStructure/watchedDirectories/system/autoRestructureForCompliance/.gitignore new file mode 100644 index 0000000000..cead4f2b3b --- /dev/null +++ b/src/MCPServer/sharedDirectoryStructure/watchedDirectories/system/autoRestructureForCompliance/.gitignore @@ -0,0 +1,4 @@ +# Ignore everything in this directory +* +# Except this file +\!.gitignore diff --git a/src/MCPServer/sharedDirectoryStructure/watchedDirectories/uploadDIP/.gitignore b/src/MCPServer/sharedDirectoryStructure/watchedDirectories/uploadDIP/.gitignore new file mode 100644 index 0000000000..cead4f2b3b --- /dev/null +++ 
b/src/MCPServer/sharedDirectoryStructure/watchedDirectories/uploadDIP/.gitignore @@ -0,0 +1,4 @@ +# Ignore everything in this directory +* +# Except this file +\!.gitignore diff --git a/src/MCPServer/sharedDirectoryStructure/watchedDirectories/uploadedDIPs/.gitignore b/src/MCPServer/sharedDirectoryStructure/watchedDirectories/uploadedDIPs/.gitignore new file mode 100644 index 0000000000..cead4f2b3b --- /dev/null +++ b/src/MCPServer/sharedDirectoryStructure/watchedDirectories/uploadedDIPs/.gitignore @@ -0,0 +1,4 @@ +# Ignore everything in this directory +* +# Except this file +\!.gitignore diff --git a/src/MCPServer/sharedDirectoryStructure/watchedDirectories/watchedDirectories/system/autoProcessSIP/.gitignore b/src/MCPServer/sharedDirectoryStructure/watchedDirectories/watchedDirectories/system/autoProcessSIP/.gitignore new file mode 100644 index 0000000000..cead4f2b3b --- /dev/null +++ b/src/MCPServer/sharedDirectoryStructure/watchedDirectories/watchedDirectories/system/autoProcessSIP/.gitignore @@ -0,0 +1,4 @@ +# Ignore everything in this directory +* +# Except this file +\!.gitignore diff --git a/src/MCPServer/sharedDirectoryStructure/watchedDirectories/workFlowDecisions/createDip/.gitignore b/src/MCPServer/sharedDirectoryStructure/watchedDirectories/workFlowDecisions/createDip/.gitignore new file mode 100644 index 0000000000..cead4f2b3b --- /dev/null +++ b/src/MCPServer/sharedDirectoryStructure/watchedDirectories/workFlowDecisions/createDip/.gitignore @@ -0,0 +1,4 @@ +# Ignore everything in this directory +* +# Except this file +\!.gitignore diff --git a/src/MCPServer/sharedDirectoryStructure/watchedDirectories/workFlowDecisions/createTransferBackup/.gitignore b/src/MCPServer/sharedDirectoryStructure/watchedDirectories/workFlowDecisions/createTransferBackup/.gitignore new file mode 100644 index 0000000000..cead4f2b3b --- /dev/null +++ b/src/MCPServer/sharedDirectoryStructure/watchedDirectories/workFlowDecisions/createTransferBackup/.gitignore @@ -0,0 +1,4 @@ +# 
Ignore everything in this directory +* +# Except this file +\!.gitignore diff --git a/src/MCPServer/sharedDirectoryStructure/watchedDirectories/workFlowDecisions/quarantineSIP/.gitignore b/src/MCPServer/sharedDirectoryStructure/watchedDirectories/workFlowDecisions/quarantineSIP/.gitignore new file mode 100644 index 0000000000..cead4f2b3b --- /dev/null +++ b/src/MCPServer/sharedDirectoryStructure/watchedDirectories/workFlowDecisions/quarantineSIP/.gitignore @@ -0,0 +1,4 @@ +# Ignore everything in this directory +* +# Except this file +\!.gitignore diff --git a/src/MCPServer/sharedDirectoryStructure/www/AIPsStore/.gitignore b/src/MCPServer/sharedDirectoryStructure/www/AIPsStore/.gitignore new file mode 100644 index 0000000000..cead4f2b3b --- /dev/null +++ b/src/MCPServer/sharedDirectoryStructure/www/AIPsStore/.gitignore @@ -0,0 +1,4 @@ +# Ignore everything in this directory +* +# Except this file +\!.gitignore diff --git a/src/MCPrpcCLI/debian/archivematica-mcp-rpccli.install b/src/MCPrpcCLI/debian/archivematica-mcp-rpccli.install new file mode 100644 index 0000000000..0a5cfdeb7a --- /dev/null +++ b/src/MCPrpcCLI/debian/archivematica-mcp-rpccli.install @@ -0,0 +1,5 @@ +lib/* /usr/share/pyshared/archivematica/ +lib/* /usr/share/archivematica/ +mcpcli /usr/bin/ + + diff --git a/src/MCPrpcCLI/debian/control b/src/MCPrpcCLI/debian/control new file mode 100644 index 0000000000..0fa720aa3a --- /dev/null +++ b/src/MCPrpcCLI/debian/control @@ -0,0 +1,14 @@ +Source: archivematica-mcp-rpccli +Section: utils +Priority: extra +Maintainer: Austin Trask <austin@artefactual.com> +Build-Depends: debhelper (>= 7) +Standards-Version: 3.8.3 +Homepage: http://archivematica.org + +Package: archivematica-mcp-rpccli +Architecture: any +Depends: ${shlibs:Depends}, ${misc:Depends}, python-lxml, python-gearman, python +Description: MCP RPC CLI + Enter long description + diff --git a/src/MCPrpcCLI/debian/copyright b/src/MCPrpcCLI/debian/copyright new file mode 100644 index 
0000000000..3906e99c09 --- /dev/null +++ b/src/MCPrpcCLI/debian/copyright @@ -0,0 +1,37 @@ +This work was packaged for Ubuntu by: + + Austin Trask <austin@artefactual.com> + +It was downloaded from http://archivematica.org + +Upstream Author(s): + + Joseph Perry <joseph@artefactual.com> + Jesus Garcia Crespo <jesus@artefactual.com> + Austin Trask <austin@artefactual.com> + Peter Van Garderen <peter@artefactual.com> + Evelyn McLellan <evelyn@artefactual.com> + +Copyright: + + Copyright (C) 2010-2012 Artefactual Systems Inc. <http://artefactual.com> + +License: + + This is free software: you can redistribute it and/or modify + it under the terms of the GNU Affero General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This software is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this software. If not, see <http://www.gnu.org/licenses/>. + + +The Debian packaging is: + + Copyright (C) 2010-2012 Artefactual Systems Inc. <http://artefactual.com> diff --git a/src/MCPrpcCLI/debian/rules b/src/MCPrpcCLI/debian/rules new file mode 100755 index 0000000000..917d9bf25d --- /dev/null +++ b/src/MCPrpcCLI/debian/rules @@ -0,0 +1,13 @@ +#!/usr/bin/make -f +# -*- makefile -*- +# Sample debian/rules that uses debhelper. +# This file was originally written by Joey Hess and Craig Small. +# As a special exception, when this file is copied by dh-make into a +# dh-make output file, you may use that output file without restriction. +# This special exception was added by Craig Small in version 0.37 of dh-make. + +# Uncomment this to turn on verbose mode. 
+#export DH_VERBOSE=1 + +%: + dh $@ diff --git a/src/MCPrpcCLI/lib/MCPrpcCLI.py b/src/MCPrpcCLI/lib/MCPrpcCLI.py new file mode 100755 index 0000000000..dc1c1205d3 --- /dev/null +++ b/src/MCPrpcCLI/lib/MCPrpcCLI.py @@ -0,0 +1,133 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. 
+ +# @package Archivematica +# @subpackage MCPrpcCLI +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ + +import gearman +import cPickle +import lxml.etree as etree +import os +import time + +class Settings: + MCP_SERVER = ('localhost', 4730) + +settings = Settings() + +class MCPClient: + + def __init__(self, host=settings.MCP_SERVER[0], port=settings.MCP_SERVER[1]): + self.server = "%s:%d" % (host, port) + + def list(self): + gm_client = gearman.GearmanClient([self.server]) + completed_job_request = gm_client.submit_job("getJobsAwaitingApproval", "", None) + #self.check_request_status(completed_job_request) + return cPickle.loads(completed_job_request.result) + + def execute(self, uuid, choice): + gm_client = gearman.GearmanClient([self.server]) + data = {} + data["jobUUID"] = uuid + data["chain"] = choice + completed_job_request = gm_client.submit_job("approveJob", cPickle.dumps(data), None) + #self.check_request_status(completed_job_request) + return + +mcpClient = MCPClient() + +def getTagged(root, tag): #bad, I use this elsewhere, should be imported + ret = [] + for element in root: + if element.tag == tag: + ret.append(element) + return ret #only return the first encounter + return ret + +def updateJobsAwaitingApproval(jobsAwaitingApproval): + del jobsAwaitingApproval + ret = mcpClient.list() + jobsAwaitingApproval = etree.XML(ret) + return jobsAwaitingApproval + +def printJobsAwaitingApproval(jobsAwaitingApproval): + i = 0 + #print len(jobsAwaitingApproval) + for job in jobsAwaitingApproval: + print i + i += 1 + print etree.tostring(job, pretty_print=True) + +def approveJob(jobsAwaitingApproval, choice, choice2): + try: + index = int(choice) + if index >= len(jobsAwaitingApproval): + print "index out of range" + return + sipUUID = getTagged(getTagged(getTagged(jobsAwaitingApproval[index], "unit")[0], \ + "unitXML")[0], \ + "UUID")[0].text + uuid = getTagged(jobsAwaitingApproval[index], "UUID")[0].text + + chain = 
getTagged(getTagged(jobsAwaitingApproval[index], "choices")[0][int(choice2)], \ + "chainAvailable")[0].text + print "Approving: " + uuid, chain, sipUUID + mcpClient.execute(uuid, int(chain)) + del jobsAwaitingApproval[index] + except ValueError: + return + + +if __name__ == '__main__': + os.system("clear") + jobsAwaitingApproval = etree.Element("jobsAwaitingApproval") + jobsAwaitingApproval = updateJobsAwaitingApproval(jobsAwaitingApproval) + #print etree.tostring(jobsAwaitingApproval) + choice = "No-op" + while choice != "q": + while not (len(jobsAwaitingApproval)): + print "Fetching..." + time.sleep(2) + jobsAwaitingApproval = updateJobsAwaitingApproval(jobsAwaitingApproval) + printJobsAwaitingApproval(jobsAwaitingApproval) + print "q to quit" + print "u to update List" + print "number to approve Job" + choice = raw_input('Please enter a value:') + print "choice: " + choice + if choice == "u": + jobsAwaitingApproval = updateJobsAwaitingApproval(jobsAwaitingApproval) + else: + if choice == "q": + break + choice2 = "No-op" + while choice2 != "q": + #try: + printJobsAwaitingApproval(jobsAwaitingApproval[int(choice)][2]) + choice2 = raw_input('Please enter a value:') + print "choice2: " + choice2 + approveJob(jobsAwaitingApproval, choice, choice2) + choice2 = "q" + #except: + #print "invalid choice" + #choice2 = "q" + os.system("clear") diff --git a/src/MCPrpcCLI/lib/info.py b/src/MCPrpcCLI/lib/info.py new file mode 100755 index 0000000000..07d56f0974 --- /dev/null +++ b/src/MCPrpcCLI/lib/info.py @@ -0,0 +1,33 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. + +# @package Archivematica +# @subpackage MCPrpcCLI +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ + +import gearman +admin = gearman.admin_client.GearmanAdminClient(host_list=["127.0.0.1"]) +for client in admin. get_workers(): + if client["client_id"] != "-": #exclude server task connections + print client["client_id"], client["ip"] + +for stat in admin.get_status(): + if stat["running"] != 0 or stat["queued"] != 0: + print stat diff --git a/src/MCPrpcCLI/mcpcli b/src/MCPrpcCLI/mcpcli new file mode 100755 index 0000000000..94ead0e631 --- /dev/null +++ b/src/MCPrpcCLI/mcpcli @@ -0,0 +1,3 @@ +#!/bin/sh + +./usr/share/archivematica/MCPrpcCLI.py diff --git a/src/SIPCreationTools/bin/archivematicaCreateDublincore b/src/SIPCreationTools/bin/archivematicaCreateDublincore new file mode 100755 index 0000000000..6c3954281c --- /dev/null +++ b/src/SIPCreationTools/bin/archivematicaCreateDublincore @@ -0,0 +1 @@ +/usr/lib/archivematica/MCPClient/clientScripts/archivematicaCreateDublinCore.py "$@" \ No newline at end of file diff --git a/src/SIPCreationTools/bin/archivematicaCreateMD5 b/src/SIPCreationTools/bin/archivematicaCreateMD5 new file mode 100755 index 0000000000..f5c4f87724 --- /dev/null +++ b/src/SIPCreationTools/bin/archivematicaCreateMD5 @@ -0,0 +1 @@ +/usr/lib/archivematica/SIPCreationTools/createMD5Checksum/createMD5.sh "$@" \ No newline at end of file diff --git a/src/SIPCreationTools/bin/archivematicaRestructureForCompliance b/src/SIPCreationTools/bin/archivematicaRestructureForCompliance new file mode 100755 index 0000000000..364e7ec5e2 --- 
/dev/null +++ b/src/SIPCreationTools/bin/archivematicaRestructureForCompliance @@ -0,0 +1 @@ +/usr/lib/archivematica/SIPCreationTools/restructureForCompliance/restructureForCompliance.sh "$@" \ No newline at end of file diff --git a/src/SIPCreationTools/bin/transferSip b/src/SIPCreationTools/bin/transferSip new file mode 100755 index 0000000000..833b259415 --- /dev/null +++ b/src/SIPCreationTools/bin/transferSip @@ -0,0 +1,91 @@ +#!/usr/bin/python +# +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. 
# @package Archivematica
# @subpackage SIP submission
# @author Austin Trask <austin@artefactual.com>
# @version svn: $Id$
#
# Move a SIP into a local receiveSIP watched directory or, failing that,
# rsync it to a remote host and move it into the remote receiveSIP
# directory.  Reports the outcome on stdout and via notify-send.

import os
import sys
import uuid
import shutil
import argparse
import subprocess

###########
##Create options and arguments
###########
parser = argparse.ArgumentParser()

parser.add_argument('-SIP', action='store', dest='sipForSubmission', default='/some/random/dir', help='Path to SIP')

parser.add_argument('-local', action='store', dest='localSubmission', default='/var/archivematica/sharedDirectory/watchedDirectories/receiveSIP', help='set the local SIP destination (/path/to/my/receiveSIP/)')

parser.add_argument('-remotehost', action='store', dest='remoteHost', help='set the remote host (username@host)')

parser.add_argument('-remotetmp', action='store', dest='remoteTmp', default='/tmp/sipSend/', help='set the remote SIP destination /tmp/destination/folder')

parser.add_argument('-remotereceivesip', action='store', dest='remoteReceiveSip', default='/var/archivematica/sharedDirectory/watchedDirectories/receiveSIP', help='receiveSIP directory on remote host')


results = parser.parse_args()


###########
##check if arguments exist if so attempt transfer or move
###########
if not os.path.isdir(results.sipForSubmission):
    sys.exit("fail: please set a SIP")

baseSip = os.path.basename(results.sipForSubmission)

if os.path.isdir(results.localSubmission):
    try:
        shutil.move(results.sipForSubmission, results.localSubmission)
        runStatus = baseSip + ": local transfer Success!"
    except (IOError, OSError, shutil.Error) as e:
        # shutil.move can also raise shutil.Error (e.g. destination
        # already exists); the original only caught IOError.
        print(e)
        sys.exit("fail: please check your path")
else:
    print("no local directory set, trying remote host")
    if results.remoteHost:
        tmpUUID = uuid.uuid1()
        tmpDir = results.remoteTmp + str(tmpUUID)
        tmpSip = tmpDir + "/" + baseSip
        destHostSip = results.remoteHost + ":" + tmpDir
        # BUGFIX: subprocess.call() signals failure through its return
        # code and never raises IOError, so the original's except clause
        # could not fire and failed transfers were reported as success.
        rc = subprocess.call(['ssh', results.remoteHost, 'mkdir', '-p', tmpDir])
        if rc == 0:
            rc = subprocess.call(['rsync', '-crh', results.sipForSubmission, destHostSip])
        if rc == 0:
            rc = subprocess.call(['ssh', results.remoteHost, 'mv', tmpSip, results.remoteReceiveSip])
        if rc != 0:
            sys.exit("fail: please check your user name ssh key")
        runStatus = baseSip + ": remote transfer Success!"
    else:
        print("remote ssh transfer failed, use --help for more information")
        sys.exit("fail: please check your paths")

print(runStatus)
# Best-effort desktop notification; the transfer already succeeded.
subprocess.call(['notify-send', '-u', 'critical', 'SIP Submission', runStatus])
+ +Upstream Author(s): + + Joseph Perry <joseph@artefactual.com> + Jesus Garcia Crespo <jesus@artefactual.com> + Austin Trask <austin@artefactual.com> + Peter Van Garderen <peter@artefactual.com> + Evelyn McLellan <evelyn@artefactual.com> + +Copyright: + + Copyright (C) 2010-2012 Artefactual Systems Inc. <http://artefactual.com> + +License: + + This is free software: you can redistribute it and/or modify + it under the terms of the GNU Affero General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This software is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this software. If not, see <http://www.gnu.org/licenses/>. + + +The Debian packaging is: + + Copyright (C) 2010-2012 Artefactual Systems Inc. <http://artefactual.com> diff --git a/src/SIPCreationTools/debian/rules b/src/SIPCreationTools/debian/rules new file mode 100755 index 0000000000..917d9bf25d --- /dev/null +++ b/src/SIPCreationTools/debian/rules @@ -0,0 +1,13 @@ +#!/usr/bin/make -f +# -*- makefile -*- +# Sample debian/rules that uses debhelper. +# This file was originally written by Joey Hess and Craig Small. +# As a special exception, when this file is copied by dh-make into a +# dh-make output file, you may use that output file without restriction. +# This special exception was added by Craig Small in version 0.37 of dh-make. + +# Uncomment this to turn on verbose mode. 
+#export DH_VERBOSE=1 + +%: + dh $@ diff --git a/src/SIPCreationTools/debian/sip-creation-tools.install b/src/SIPCreationTools/debian/sip-creation-tools.install new file mode 100644 index 0000000000..4f6bb9845b --- /dev/null +++ b/src/SIPCreationTools/debian/sip-creation-tools.install @@ -0,0 +1,2 @@ +bin/* /usr/bin/ +lib/* /usr/lib/archivematica/SIPCreationTools/ diff --git a/src/SIPCreationTools/etc/.gitignore b/src/SIPCreationTools/etc/.gitignore new file mode 100644 index 0000000000..cead4f2b3b --- /dev/null +++ b/src/SIPCreationTools/etc/.gitignore @@ -0,0 +1,4 @@ +# Ignore everything in this directory +* +# Except this file +\!.gitignore diff --git a/src/SIPCreationTools/lib/createMD5Checksum/createMD5.sh b/src/SIPCreationTools/lib/createMD5Checksum/createMD5.sh new file mode 100755 index 0000000000..e25863161b --- /dev/null +++ b/src/SIPCreationTools/lib/createMD5Checksum/createMD5.sh @@ -0,0 +1,40 @@ +#!/bin/bash + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. 
# @package Archivematica
# @subpackage SIPCreationTools
# @author Joseph Perry <joseph@artefactual.com>
# @version svn: $Id$
#
# Write an md5deep manifest of a SIP's objects/ directory to the SIP's
# metadata/checksum.md5.  Takes the SIP directory as $1.

#source /etc/archivematica/SIPCreationTools/md5Settings

outputDirectory="./../metadata"
outputFile="${outputDirectory}/checksum.md5"
set -e
cd "$1"
cd objects
if [ -d "${outputDirectory}" ]; then
    md5deep -rl "." > "${outputFile}"
else
    # BUGFIX: the original's stray "\ " before 1>&2 escaped the space,
    # turning the redirection into a literal "1" in the message and
    # sending the text to the redirected descriptor incorrectly.
    echo "${outputDirectory} does not exist" 1>&2
    # The original message claimed the script must be run from the
    # objects directory, but the script cd's there itself; the real
    # requirement is that the SIP in $1 contain a metadata directory.
    echo "the SIP passed as argument 1 needs a metadata directory" 1>&2
    exit 1
fi
+ + mkdir "${target}logs" + mkdir "${target}logs/fileMeta" + mkdir "${target}metadata" + mkdir "${target}metadata/submissionDocumentation" + mkdir "${target}objects" + mv "$temp"/* "${target}objects/." +else + echo Error: Needs SIP directory as argument 1>&2 + exit 1 +fi + + + diff --git a/src/archivematicaCommon/README.txt b/src/archivematicaCommon/README.txt new file mode 100644 index 0000000000..5f415d3a9e --- /dev/null +++ b/src/archivematicaCommon/README.txt @@ -0,0 +1 @@ +This package is a support package for archivematica. It contains library a library of functions used in the archivematica system. diff --git a/src/archivematicaCommon/debian/archivematica-common.install b/src/archivematicaCommon/debian/archivematica-common.install new file mode 100755 index 0000000000..865f6ea091 --- /dev/null +++ b/src/archivematicaCommon/debian/archivematica-common.install @@ -0,0 +1,2 @@ +etc/* /etc/archivematica/archivematicaCommon/ +lib/* /usr/lib/archivematica/archivematicaCommon/ diff --git a/src/archivematicaCommon/debian/control b/src/archivematicaCommon/debian/control new file mode 100755 index 0000000000..edd3673b78 --- /dev/null +++ b/src/archivematicaCommon/debian/control @@ -0,0 +1,13 @@ +Source: archivematica-common +Section: utils +Priority: extra +Maintainer: Austin Trask <austin@artefactual.com> +Build-Depends: debhelper (>= 7) +Standards-Version: 3.8.3 +Homepage: http://archivematica.org + +Package: archivematica-common +Architecture: any +Depends: ${shlibs:Depends}, ${misc:Depends}, python, python2.7-elementtree +Description: Common libraries for archivematica + This package is a support package for archivematica. It contains a library of functions used in the archivematica system. 
diff --git a/src/archivematicaCommon/debian/copyright b/src/archivematicaCommon/debian/copyright new file mode 100755 index 0000000000..3906e99c09 --- /dev/null +++ b/src/archivematicaCommon/debian/copyright @@ -0,0 +1,37 @@ +This work was packaged for Ubuntu by: + + Austin Trask <austin@artefactual.com> + +It was downloaded from http://archivematica.org + +Upstream Author(s): + + Joseph Perry <joseph@artefactual.com> + Jesus Garcia Crespo <jesus@artefactual.com> + Austin Trask <austin@artefactual.com> + Peter Van Garderen <peter@artefactual.com> + Evelyn McLellan <evelyn@artefactual.com> + +Copyright: + + Copyright (C) 2010-2012 Artefactual Systems Inc. <http://artefactual.com> + +License: + + This is free software: you can redistribute it and/or modify + it under the terms of the GNU Affero General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This software is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this software. If not, see <http://www.gnu.org/licenses/>. + + +The Debian packaging is: + + Copyright (C) 2010-2012 Artefactual Systems Inc. <http://artefactual.com> diff --git a/src/archivematicaCommon/debian/rules b/src/archivematicaCommon/debian/rules new file mode 100755 index 0000000000..917d9bf25d --- /dev/null +++ b/src/archivematicaCommon/debian/rules @@ -0,0 +1,13 @@ +#!/usr/bin/make -f +# -*- makefile -*- +# Sample debian/rules that uses debhelper. +# This file was originally written by Joey Hess and Craig Small. +# As a special exception, when this file is copied by dh-make into a +# dh-make output file, you may use that output file without restriction. 
+# This special exception was added by Craig Small in version 0.37 of dh-make. + +# Uncomment this to turn on verbose mode. +#export DH_VERBOSE=1 + +%: + dh $@ diff --git a/src/archivematicaCommon/etc/dbsettings b/src/archivematicaCommon/etc/dbsettings new file mode 100644 index 0000000000..a2c864733e --- /dev/null +++ b/src/archivematicaCommon/etc/dbsettings @@ -0,0 +1,5 @@ +[client] +user=demo +password="demo" +host=localhost + diff --git a/src/archivematicaCommon/lib/__init__.py b/src/archivematicaCommon/lib/__init__.py new file mode 100644 index 0000000000..0a6e37bb45 --- /dev/null +++ b/src/archivematicaCommon/lib/__init__.py @@ -0,0 +1 @@ +import archivematicaMCPFileUUID diff --git a/src/archivematicaCommon/lib/archivematicaCreateStructuredDirectory.py b/src/archivematicaCommon/lib/archivematicaCreateStructuredDirectory.py new file mode 100755 index 0000000000..63c3c72d3a --- /dev/null +++ b/src/archivematicaCommon/lib/archivematicaCreateStructuredDirectory.py @@ -0,0 +1,39 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. 
# @package Archivematica
# @subpackage archivematicaCommon
# @author Joseph Perry <joseph@artefactual.com>
# @version svn: $Id$
import os
import sys

# Directory skeleton every structured SIP must contain.
requiredDirectories = ["objects", \
    "logs", \
    "metadata",\
    "metadata/submissionDocumentation"]

def createStructuredDirectory(SIPDir):
    """Ensure the standard SIP sub-directory layout exists under SIPDir.

    Missing directories (including intermediate ones) are created;
    directories that already exist are left untouched.
    """
    for subDirectory in requiredDirectories:
        fullPath = os.path.join(SIPDir, subDirectory)
        if os.path.isdir(fullPath):
            continue
        os.makedirs(fullPath)

if __name__ == '__main__':
    SIPDir = sys.argv[1]
    createStructuredDirectory(SIPDir)
+ +# @package Archivematica +# @subpackage archivematicaCommon +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ + + +szAnswer=$(zenity --entry --width=375 --title "Create Structured Directory" --text "Directory name?\nNote: Cannot be changed once created.") && /usr/lib/archivematica/archivematicaCommon/archivematicaCreateStructuredDirectory.py "${szAnswer}" + + + + diff --git a/src/archivematicaCommon/lib/archivematicaFunctions.py b/src/archivematicaCommon/lib/archivematicaFunctions.py new file mode 100755 index 0000000000..44178c3a0e --- /dev/null +++ b/src/archivematicaCommon/lib/archivematicaFunctions.py @@ -0,0 +1,114 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. 
+ +# @package Archivematica +# @subpackage archivematicaCommon +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ + +import lxml.etree as etree +import os +import sys + +def unicodeToStr(string): + if isinstance(string, unicode): + string = string.encode("utf-8") + return string + +def strToUnicode(string): + if isinstance(string, str): + string = string.decode("utf-8") + return string + + +def getTagged(root, tag): + ret = [] + for element in root: + #print element.tag + #print tag + #print element.tag == tag + if element.tag == tag: + ret.append(element) + #return ret #only return the first encounter + return ret + + +def appendEventToFile(SIPLogsDirectory, fileUUID, eventXML): + xmlFile = SIPLogsDirectory + "fileMeta/" + fileUUID + ".xml" + appendEventToFile2(xmlFile, eventXML) + +def appendEventToFile2(xmlFile, eventXML): + tree = etree.parse( xmlFile ) + root = tree.getroot() + + events = getTagged(root, "events")[0] + events.append(eventXML) + + tree = etree.ElementTree(root) + tree.write(xmlFile) + +def archivematicaRenameFile(SIPLogsDirectory, fileUUID, newName, eventXML): + xmlFile = SIPLogsDirectory + "fileMeta/" + fileUUID + ".xml" + newName = newName.decode('utf-8') + tree = etree.parse( xmlFile ) + root = tree.getroot() + xmlFileName = getTagged(root, "currentFileName")[0] + xmlFileName.text = newName + + events = getTagged(root, "events")[0] + events.append(eventXML) + + #print etree.tostring(root, pretty_print=True) + + tree = etree.ElementTree(root) + tree.write(xmlFile) + + +def fileNoLongerExists(root, objectsDir): + """Returns 0 if not deleted, 1 if deleted, -1 if deleted, but already an event to indicated it has been removed""" + events = getTagged(root, "events")[0] + + for event in getTagged(events, "event"): + #print >>sys.stderr , "event" + etype = getTagged(event, "eventType") + if len(etype) and etype[0].text == "fileRemoved": + #print >>sys.stderr , "file already removed" + return -1 + + currentName = getTagged(root, 
"currentFileName")[0].text + + currentName2 = currentName.replace("objects", objectsDir, 1) + if os.path.isfile(currentName2.encode('utf8')): + return 0 + else: + print currentName + return 1 + +def escapeForCommand(string): + ret = string + if isinstance(ret, unicode) or isinstance(ret,str) : + ret = ret.replace("\\", "\\\\") + ret = ret.replace("\"", "\\\"") + #ret = ret.replace("'", "\\'") + ret = ret.replace("$", "\\$") + return ret + +def escape(string): + #string = string.decode('utf-8') + return string diff --git a/src/archivematicaCommon/lib/archivematicaMCPFileUUID.py b/src/archivematicaCommon/lib/archivematicaMCPFileUUID.py new file mode 100755 index 0000000000..610c253b43 --- /dev/null +++ b/src/archivematicaCommon/lib/archivematicaMCPFileUUID.py @@ -0,0 +1,167 @@ +#!/usr/bin/python -OO +# +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. + +# @package Archivematica +# @subpackage archivematicaCommon +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ + +#~DOC~ +# +#This file is all in support of one goal: +#to get the UUID of a file efficiently. +#Primarily it looks for them in the 'sipUUIDfile' : /logs/fileUUIDs.log +#Failing that, it checks each of the fileMeta xml files to match the filename on the 'currentFileName' field. 
#In order to do this efficiently and not block the processing of other SIPs, it dynamically creates a lock for each SIP, based on the UUID.
#


import os
import lxml.etree as etree
import sys
import threading
import string

# Guards the two bookkeeping dictionaries below.
lockDicsLock = threading.Lock()
# sipUUIDfile -> number of threads that have acquired (or are waiting on)
# that file's lock; the entry is removed once the count drops to zero.
sipUUIDFileLocksCount = {}
# sipUUIDfile -> threading.Lock serializing access to that file.
sipUUIDFileLocks = {}

def releaseSIPUUIDFileLock(sipUUIDfile):
    """Release the per-SIP lock taken by acquireSIPUUIDFileLock()."""
    lockDicsLock.acquire()
    try:
        # BUGFIX: the per-file lock itself was never released, so any
        # second thread already queued on the same sipUUIDfile in
        # acquireSIPUUIDFileLock() would deadlock forever.
        sipUUIDFileLocks[sipUUIDfile].release()
        sipUUIDFileLocksCount[sipUUIDfile] -= 1
        if sipUUIDFileLocksCount[sipUUIDfile] == 0:
            #remove the locks from the system to prevent memory leak.
            del sipUUIDFileLocksCount[sipUUIDfile]
            del sipUUIDFileLocks[sipUUIDfile]
    finally:
        lockDicsLock.release()

def acquireSIPUUIDFileLock(sipUUIDfile):
    """Block until this thread holds the per-SIP lock for sipUUIDfile."""
    lockDicsLock.acquire()
    if sipUUIDfile in sipUUIDFileLocksCount:
        sipUUIDFileLocksCount[sipUUIDfile] += 1
    else:
        sipUUIDFileLocksCount[sipUUIDfile] = 1
        sipUUIDFileLocks[sipUUIDfile] = threading.Lock()
    lockDicsLock.release()
    # Safe to touch outside lockDicsLock: this thread's own count keeps
    # the dictionary entry alive until its matching release.
    sipUUIDFileLocks[sipUUIDfile].acquire()
+ +def loadFileUUIDsDic(sipUUIDfile): + UUIDsDic = {} + if os.path.isfile(sipUUIDfile): + FileUUIDs_fh = open(sipUUIDfile, "r") + line = FileUUIDs_fh.readline() + while line: + theFileLine = line.split(" -> ",1) + if len(theFileLine) > 1 : + fileUUID = theFileLine[0] + fileName = theFileLine[1] + fileName = string.replace(fileName, "\n", "", 1) + UUIDsDic[fileName] = fileUUID + line = FileUUIDs_fh.readline() + else: + UUIDsDic = {} + return UUIDsDic + +def getTagged(root, tag): + ret = [] + for element in root: + if element.tag == tag: + ret.append(element) + return ret #only return the first encounter + return ret + +def findUUIDFromFileUUIDxml(sipUUIDfile, filename, fileUUIDxmlFilesDirectory, updateSIPUUIDfile=True): + ret = "No UUID for file: " + filename + #for every file in the fileUUIDxmlFilesDirectory: + configFiles = [] + try: + for dirs, subDirs, files in os.walk(fileUUIDxmlFilesDirectory): + configFiles = files + break + + #print "config file - dir: ", fileUUIDxmlFilesDirectory + for configFile in configFiles: + if configFile.endswith(".xml"): + try: + #print "config file - opening: " + configFile + tree = etree.parse(fileUUIDxmlFilesDirectory + configFile ) + root = tree.getroot() + xmlFileName = getTagged(root, "currentFileName")[0] + uuid = getTagged(root, "fileUUID")[0] + if xmlFileName.text == filename: + ret = uuid.text + try: + if updateSIPUUIDfile: + acquireSIPUUIDFileLock(sipUUIDfile) + f = open(sipUUIDfile, 'a') + f.write(uuid.text + " -> " + filename + "\n") + f.close() + except OSError, ose: + print >>sys.stderr, "output Error", ose + return -2 + except IOError as (errno, strerror): + print "I/O error({0}): {1}".format(errno, strerror) + except: + print "debug except 1" + #print "releasing Lock" + if updateSIPUUIDfile: + releaseSIPUUIDFileLock(sipUUIDfile) + return ret + except Exception as inst: + print "debug except 2" + print type(inst) # the exception instance + print inst.args # arguments stored in .args + print inst # __str__ allows args 
to printed directly + continue + except: + print "debug except 3" + ret = ret + return ret + + +def getUUIDOfFile(sipUUIDfile, basepath, fullFileName, fileUUIDxmlFilesDirectory, relativeString="objects/"): + UUIDsDic = loadFileUUIDsDic(sipUUIDfile) + if basepath not in fullFileName: + return "No UUID for file: " + os.path.basename(fullFileName) + filename = string.replace( fullFileName, basepath, relativeString, 1 ) + if UUIDsDic and filename in UUIDsDic: + return UUIDsDic[filename] + else : + return findUUIDFromFileUUIDxml(sipUUIDfile, filename, fileUUIDxmlFilesDirectory) + + +if __name__ == '__main__': + function = sys.argv[1] + + if function == "Logline" : + basepath = sys.argv[2] + fullFileName = sys.argv[3] + filename = string.replace( fullFileName, basepath, "objects", 1 ) + print filename + + elif function == "getFileUUID": + sipUUIDfile = sys.argv[2] + basepath = sys.argv[3] + fullFileName = sys.argv[4] + fileUUIDxmlFilesDirectory = sys.argv[5] + print getUUIDOfFile( sipUUIDfile, basepath, fullFileName, fileUUIDxmlFilesDirectory) diff --git a/src/archivematicaCommon/lib/countryCodes.py b/src/archivematicaCommon/lib/countryCodes.py new file mode 100755 index 0000000000..44d6e81d57 --- /dev/null +++ b/src/archivematicaCommon/lib/countryCodes.py @@ -0,0 +1,302 @@ +#!/usr/bin/env python +# -*- coding: latin-1 -*- +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
#
# You should have received a copy of the GNU General Public License
# along with Archivematica. If not, see <http://www.gnu.org/licenses/>.

# @package Archivematica
# @subpackage archivematicaCommon
# @author Joseph Perry <joseph@artefactual.com>
# @version svn: $Id$



#http://www.iso.org/iso/country_codes.htm
#Country Codes - ISO 3166
#Country Name;ISO 3166-1-alpha-2 code

# Upper-case country name -> ISO 3166-1 alpha-2 code.
# NOTE(review): some names below (e.g. "Ã…LAND ISLANDS") look mis-encoded
# relative to the file's declared source coding — confirm against the
# upstream ISO list before relying on exact key spellings.
countryCodes = {
    "AFGHANISTAN" : "AF",
    "Ã…LAND ISLANDS" : "AX",
    "ALBANIA" : "AL",
    "ALGERIA" : "DZ",
    "AMERICAN SAMOA" : "AS",
    "ANDORRA" : "AD",
    "ANGOLA" : "AO",
    "ANGUILLA" : "AI",
    "ANTARCTICA" : "AQ",
    "ANTIGUA AND BARBUDA" : "AG",
    "ARGENTINA" : "AR",
    "ARMENIA" : "AM",
    "ARUBA" : "AW",
    "AUSTRALIA" : "AU",
    "AUSTRIA" : "AT",
    "AZERBAIJAN" : "AZ",
    "BAHAMAS" : "BS",
    "BAHRAIN" : "BH",
    "BANGLADESH" : "BD",
    "BARBADOS" : "BB",
    "BELARUS" : "BY",
    "BELGIUM" : "BE",
    "BELIZE" : "BZ",
    "BENIN" : "BJ",
    "BERMUDA" : "BM",
    "BHUTAN" : "BT",
    "BOLIVIA, PLURINATIONAL STATE OF" : "BO",
    "BONAIRE, SINT EUSTATIUS AND SABA" : "BQ",
    "BOSNIA AND HERZEGOVINA" : "BA",
    "BOTSWANA" : "BW",
    "BOUVET ISLAND" : "BV",
    "BRAZIL" : "BR",
    "BRITISH INDIAN OCEAN TERRITORY" : "IO",
    "BRUNEI DARUSSALAM" : "BN",
    "BULGARIA" : "BG",
    "BURKINA FASO" : "BF",
    "BURUNDI" : "BI",
    "CAMBODIA" : "KH",
    "CAMEROON" : "CM",
    "CANADA" : "CA",
    "CAPE VERDE" : "CV",
    "CAYMAN ISLANDS" : "KY",
    "CENTRAL AFRICAN REPUBLIC" : "CF",
    "CHAD" : "TD",
    "CHILE" : "CL",
    "CHINA" : "CN",
    "CHRISTMAS ISLAND" : "CX",
    "COCOS (KEELING) ISLANDS" : "CC",
    "COLOMBIA" : "CO",
    "COMOROS" : "KM",
    "CONGO" : "CG",
    "CONGO, THE DEMOCRATIC REPUBLIC OF THE" : "CD",
    "COOK ISLANDS" : "CK",
    "COSTA RICA" : "CR",
    "CÔTE D'IVOIRE" : "CI",
    "CROATIA" : "HR",
    "CUBA" : "CU",
    "CURAÇAO" : "CW",
    "CYPRUS" : "CY",
    "CZECH REPUBLIC" : "CZ",
    "DENMARK" : "DK",
    "DJIBOUTI" : "DJ",
    "DOMINICA" : "DM",
    "DOMINICAN REPUBLIC" : "DO",
    "ECUADOR" : "EC",
    "EGYPT" : "EG",
    "EL SALVADOR" : "SV",
    "EQUATORIAL GUINEA" : "GQ",
    "ERITREA" : "ER",
    "ESTONIA" : "EE",
    "ETHIOPIA" : "ET",
    "FALKLAND ISLANDS (MALVINAS)" : "FK",
    "FAROE ISLANDS" : "FO",
    "FIJI" : "FJ",
    "FINLAND" : "FI",
    "FRANCE" : "FR",
    "FRENCH GUIANA" : "GF",
    "FRENCH POLYNESIA" : "PF",
    "FRENCH SOUTHERN TERRITORIES" : "TF",
    "GABON" : "GA",
    "GAMBIA" : "GM",
    "GEORGIA" : "GE",
    "GERMANY" : "DE",
    "GHANA" : "GH",
    "GIBRALTAR" : "GI",
    "GREECE" : "GR",
    "GREENLAND" : "GL",
    "GRENADA" : "GD",
    "GUADELOUPE" : "GP",
    "GUAM" : "GU",
    "GUATEMALA" : "GT",
    "GUERNSEY" : "GG",
    "GUINEA" : "GN",
    "GUINEA-BISSAU" : "GW",
    "GUYANA" : "GY",
    "HAITI" : "HT",
    "HEARD ISLAND AND MCDONALD ISLANDS" : "HM",
    "HOLY SEE (VATICAN CITY STATE)" : "VA",
    "HONDURAS" : "HN",
    "HONG KONG" : "HK",
    "HUNGARY" : "HU",
    "ICELAND" : "IS",
    "INDIA" : "IN",
    "INDONESIA" : "ID",
    "IRAN, ISLAMIC REPUBLIC OF" : "IR",
    "IRAQ" : "IQ",
    "IRELAND" : "IE",
    "ISLE OF MAN" : "IM",
    "ISRAEL" : "IL",
    "ITALY" : "IT",
    "JAMAICA" : "JM",
    "JAPAN" : "JP",
    "JERSEY" : "JE",
    "JORDAN" : "JO",
    "KAZAKHSTAN" : "KZ",
    "KENYA" : "KE",
    "KIRIBATI" : "KI",
    "KOREA, DEMOCRATIC PEOPLE'S REPUBLIC OF" : "KP",
    "KOREA, REPUBLIC OF" : "KR",
    "KUWAIT" : "KW",
    "KYRGYZSTAN" : "KG",
    "LAO PEOPLE'S DEMOCRATIC REPUBLIC" : "LA",
    "LATVIA" : "LV",
    "LEBANON" : "LB",
    "LESOTHO" : "LS",
    "LIBERIA" : "LR",
    "LIBYA" : "LY",
    "LIECHTENSTEIN" : "LI",
    "LITHUANIA" : "LT",
    "LUXEMBOURG" : "LU",
    "MACAO" : "MO",
    "MACEDONIA, THE FORMER YUGOSLAV REPUBLIC OF" : "MK",
    "MADAGASCAR" : "MG",
    "MALAWI" : "MW",
    "MALAYSIA" : "MY",
    "MALDIVES" : "MV",
    "MALI" : "ML",
    "MALTA" : "MT",
    "MARSHALL ISLANDS" : "MH",
    "MARTINIQUE" : "MQ",
    "MAURITANIA" : "MR",
    "MAURITIUS" : "MU",
    "MAYOTTE" : "YT",
    "MEXICO" : "MX",
    "MICRONESIA, FEDERATED STATES OF" : "FM",
    "MOLDOVA, REPUBLIC OF" : "MD",
    "MONACO" : "MC",
    "MONGOLIA" : "MN",
    "MONTENEGRO" : "ME",
    "MONTSERRAT" : "MS",
    "MOROCCO" : "MA",
    "MOZAMBIQUE" : "MZ",
    "MYANMAR" : "MM",
    "NAMIBIA" : "NA",
    "NAURU" : "NR",
    "NEPAL" : "NP",
    "NETHERLANDS" : "NL",
    "NEW CALEDONIA" : "NC",
    "NEW ZEALAND" : "NZ",
    "NICARAGUA" : "NI",
    "NIGER" : "NE",
    "NIGERIA" : "NG",
    "NIUE" : "NU",
    "NORFOLK ISLAND" : "NF",
    "NORTHERN MARIANA ISLANDS" : "MP",
    "NORWAY" : "NO",
    "OMAN" : "OM",
    "PAKISTAN" : "PK",
    "PALAU" : "PW",
    "PALESTINIAN TERRITORY, OCCUPIED" : "PS",
    "PANAMA" : "PA",
    "PAPUA NEW GUINEA" : "PG",
    "PARAGUAY" : "PY",
    "PERU" : "PE",
    "PHILIPPINES" : "PH",
    "PITCAIRN" : "PN",
    "POLAND" : "PL",
    "PORTUGAL" : "PT",
    "PUERTO RICO" : "PR",
    "QATAR" : "QA",
    "RÉUNION" : "RE",
    "ROMANIA" : "RO",
    "RUSSIAN FEDERATION" : "RU",
    "RWANDA" : "RW",
    "SAINT BARTHÉLEMY" : "BL",
    "SAINT HELENA, ASCENSION AND TRISTAN DA CUNHA" : "SH",
    "SAINT KITTS AND NEVIS" : "KN",
    "SAINT LUCIA" : "LC",
    "SAINT MARTIN (FRENCH PART)" : "MF",
    "SAINT PIERRE AND MIQUELON" : "PM",
    "SAINT VINCENT AND THE GRENADINES" : "VC",
    "SAMOA" : "WS",
    "SAN MARINO" : "SM",
    "SAO TOME AND PRINCIPE" : "ST",
    "SAUDI ARABIA" : "SA",
    "SENEGAL" : "SN",
    "SERBIA" : "RS",
    "SEYCHELLES" : "SC",
    "SIERRA LEONE" : "SL",
    "SINGAPORE" : "SG",
    "SINT MAARTEN (DUTCH PART)" : "SX",
    "SLOVAKIA" : "SK",
    "SLOVENIA" : "SI",
    "SOLOMON ISLANDS" : "SB",
    "SOMALIA" : "SO",
    "SOUTH AFRICA" : "ZA",
    "SOUTH GEORGIA AND THE SOUTH SANDWICH ISLANDS" : "GS",
    "SOUTH SUDAN" : "SS",
    "SPAIN" : "ES",
    "SRI LANKA" : "LK",
    "SUDAN" : "SD",
    "SURINAME" : "SR",
    "SVALBARD AND JAN MAYEN" : "SJ",
    "SWAZILAND" : "SZ",
    "SWEDEN" : "SE",
    "SWITZERLAND" : "CH",
    "SYRIAN ARAB REPUBLIC" : "SY",
    "TAIWAN, PROVINCE OF CHINA" : "TW",
    "TAJIKISTAN" : "TJ",
    "TANZANIA, UNITED REPUBLIC OF" : "TZ",
    "THAILAND" : "TH",
    "TIMOR-LESTE" : "TL",
    "TOGO" : "TG",
    "TOKELAU" : "TK",
    "TONGA" : "TO",
    "TRINIDAD AND TOBAGO" : "TT",
    "TUNISIA" : "TN",
    "TURKEY" : "TR",
    "TURKMENISTAN" : "TM",
    "TURKS AND CAICOS ISLANDS" : "TC",
    "TUVALU" : "TV",
    "UGANDA" : "UG",
    "UKRAINE" : "UA",
    "UNITED ARAB EMIRATES" : "AE",
    "UNITED KINGDOM" : "GB",
    "UNITED STATES" : "US",
    "UNITED STATES MINOR OUTLYING ISLANDS" : "UM",
    "URUGUAY" : "UY",
    "UZBEKISTAN" : "UZ",
    "VANUATU" : "VU",
    "VENEZUELA, BOLIVARIAN REPUBLIC OF" : "VE",
    "VIET NAM" : "VN",
    "VIRGIN ISLANDS, BRITISH" : "VG",
    "VIRGIN ISLANDS, U.S." : "VI",
    "WALLIS AND FUTUNA" : "WF",
    "WESTERN SAHARA" : "EH",
    "YEMEN" : "YE",
    "ZAMBIA" : "ZM",
    "ZIMBABWE" : "ZW"
}

def getCountryCodes():
    """Return the full country-name -> alpha-2 code mapping."""
    return countryCodes

def getCodeForCountry(country):
    """Return the alpha-2 code for an upper-case country name, or None."""
    if country in countryCodes:
        return countryCodes[country]
    else:
        return None

def getCountryFromCode(code):
    """Return the upper-case country name for an alpha-2 code, or None."""
    # .items() behaves identically here on Python 2 and 3; the original
    # used the Python-2-only iteritems().
    for key, value in countryCodes.items():
        if value == code:
            return key
    return None
+ +# @package Archivematica +# @subpackage archivematicaCommon +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ +import os +import sys +import databaseInterface +import MySQLdb +import uuid +from archivematicaFunctions import unicodeToStr + +def escapeForDB(str): + str = unicodeToStr(str) + str = MySQLdb.escape_string(str) + return str + +def insertIntoFiles(fileUUID, filePath, enteredSystem=databaseInterface.getUTCDate(), transferUUID="", sipUUID="", use="original"): + if transferUUID != "" and sipUUID == "": + databaseInterface.runSQL("""INSERT INTO Files (fileUUID, originalLocation, currentLocation, enteredSystem, fileGrpUse, transferUUID) + VALUES ( '""" + fileUUID + databaseInterface.separator \ + + escapeForDB(filePath) + databaseInterface.separator \ + + escapeForDB(filePath) + databaseInterface.separator \ + + enteredSystem + databaseInterface.separator \ + + use + databaseInterface.separator \ + + transferUUID + "' )" ) + elif transferUUID == "" and sipUUID != "": + databaseInterface.runSQL("""INSERT INTO Files (fileUUID, originalLocation, currentLocation, enteredSystem, fileGrpUse, sipUUID) + VALUES ( '""" + fileUUID + databaseInterface.separator \ + + escapeForDB(filePath) + databaseInterface.separator \ + + escapeForDB(filePath) + databaseInterface.separator \ + + enteredSystem + databaseInterface.separator \ + + use + databaseInterface.separator \ + + sipUUID + "' )" ) + else: + print >>sys.stderr, "not supported yet - both SIP and transfer UUID's defined (or neither defined)" + print >>sys.stderr, "SIP UUID:", sipUUID + print >>sys.stderr, "transferUUID:", transferUUID + raise Exception("not supported yet - both SIP and transfer UUID's defined (or neither defined)", sipUUID + "-" + transferUUID) + +def insertIntoEvents(fileUUID="", eventIdentifierUUID="", eventType="", eventDateTime=databaseInterface.getUTCDate(), eventDetail="", eventOutcome="", eventOutcomeDetailNote=""): + if eventIdentifierUUID == "": + eventIdentifierUUID = 
uuid.uuid4().__str__() + databaseInterface.runSQL("""INSERT INTO Events (fileUUID, eventIdentifierUUID, eventType, eventDateTime, eventDetail, eventOutcome, eventOutcomeDetailNote) + VALUES ( '""" + escapeForDB(fileUUID) + databaseInterface.separator \ + + escapeForDB(eventIdentifierUUID) + databaseInterface.separator \ + + escapeForDB(eventType) + databaseInterface.separator \ + + escapeForDB(eventDateTime) + databaseInterface.separator \ + + escapeForDB(eventDetail) + databaseInterface.separator \ + + escapeForDB(eventOutcome) + databaseInterface.separator \ + + escapeForDB(eventOutcomeDetailNote) + "' )" ) + +def insertIntoDerivations(sourceFileUUID="", derivedFileUUID="", relatedEventUUID=""): + databaseInterface.runSQL("""INSERT INTO Derivations + (sourceFileUUID, derivedFileUUID, relatedEventUUID) + VALUES ( '""" \ + + sourceFileUUID + databaseInterface.separator \ + + derivedFileUUID + databaseInterface.separator \ + + relatedEventUUID + "');") + +def insertIntoFilesFits(fileUUID="", fitsXMLString=""): + databaseInterface.runSQL("""INSERT INTO FilesFits + (fileUUID, FITSxml) + VALUES ( '""" \ + + escapeForDB(fileUUID) + databaseInterface.separator \ + + escapeForDB(fitsXMLString) + "');") + +def insertIntoFilesIDs(fileUUID="", formatName="", formatVersion="", formatRegistryName="", formatRegistryKey=""): + databaseInterface.runSQL("""INSERT INTO FilesIDs + (fileUUID, formatName, formatVersion, formatRegistryName, formatRegistryKey) + VALUES ( '""" \ + + escapeForDB(fileUUID) + databaseInterface.separator \ + + escapeForDB(formatName) + databaseInterface.separator \ + + escapeForDB(formatVersion) + databaseInterface.separator \ + + escapeForDB(formatRegistryName) + databaseInterface.separator \ + + escapeForDB(formatRegistryKey) + "');") + + + +#user approved? +#client connected/disconnected. 
+ +def logTaskCreatedSQL(taskManager, commandReplacementDic, taskUUID, arguments): + taskUUID = taskUUID + jobUUID = taskManager.jobChainLink.UUID + fileUUID = "" + if "%fileUUID%" in commandReplacementDic: + fileUUID = commandReplacementDic["%fileUUID%"] + taskexec = taskManager.execute + fileName = os.path.basename(os.path.abspath(commandReplacementDic["%relativeLocation%"])) + + databaseInterface.runSQL("""INSERT INTO Tasks (taskUUID, jobUUID, fileUUID, fileName, exec, arguments, createdTime) + VALUES ( '""" + taskUUID + databaseInterface.separator \ + + jobUUID + databaseInterface.separator \ + + escapeForDB(fileUUID) + databaseInterface.separator \ + + escapeForDB(fileName) + databaseInterface.separator \ + + escapeForDB(taskexec) + databaseInterface.separator \ + + escapeForDB(arguments) + databaseInterface.separator \ + + databaseInterface.getUTCDate() + "' )" ) + +def logTaskAssignedSQL(taskUUID, client, date): + databaseInterface.runSQL("UPDATE Tasks " + \ + "SET startTime='" + date + "', client='" + client + "' " + \ + "WHERE taskUUID='" + taskUUID + "';" ) + +def logTaskCompletedSQL(task): + print "Logging task output to db", task.UUID + taskUUID = task.UUID.__str__() + exitCode = task.results["exitCode"].__str__() + stdOut = task.results["stdOut"] + stdError = task.results["stdError"] + + databaseInterface.runSQL("UPDATE Tasks " + \ + "SET endTime='" + databaseInterface.getUTCDate() +"', exitCode='" + exitCode + "', " + \ + "stdOut='" + escapeForDB(stdOut) + "', stdError='" + escapeForDB(stdError) + "' " + "WHERE taskUUID='" + taskUUID + "'" ) + + +def logJobCreatedSQL(job): + separator = databaseInterface.getSeparator() + unitUUID = job.unit.UUID + if job.unit.owningUnit != None: + unitUUID = job.unit.owningUnit.UUID + databaseInterface.runSQL("""INSERT INTO Jobs (jobUUID, jobType, directory, SIPUUID, currentStep, unitType, microserviceGroup, createdTime, createdTimeDec, MicroServiceChainLinksPK, subJobOf) + VALUES ( '""" + job.UUID.__str__() + 
separator + escapeForDB(job.description) + separator \ + + escapeForDB(job.unit.currentPath) + separator + escapeForDB(unitUUID) + \ + separator + "Executing command(s)" + separator + job.unit.__class__.__name__ + separator + job.microserviceGroup.__str__() + separator + job.createdDate + separator + databaseInterface.getDeciDate("." + job.createdDate.split(".")[-1]) + "', " + job.pk.__str__() + ", '" + job.subJobOf.__str__() + "' )" ) + #TODO -un hardcode executing exeCommand + + +def logJobStepCompletedSQL(job): + databaseInterface.runSQL("""INSERT INTO jobStepCompleted (jobUUID, step, completedTime) + VALUES ( '""" + job.UUID.__str__() + databaseInterface.separator + job.step + databaseInterface.separator + databaseInterface.getUTCDate() + "' )" ) + +def fileWasRemoved(fileUUID, utcDate=databaseInterface.getUTCDate(), eventDetail = "", eventOutcomeDetailNote = "", eventOutcome=""): + eventIdentifierUUID = uuid.uuid4().__str__() + eventType = "file removed" + eventDateTime = utcDate + insertIntoEvents(fileUUID=fileUUID, \ + eventIdentifierUUID=eventIdentifierUUID, \ + eventType=eventType, \ + eventDateTime=eventDateTime, \ + eventDetail=eventDetail, \ + eventOutcome=eventOutcome, \ + eventOutcomeDetailNote=eventOutcomeDetailNote) + + + databaseInterface.runSQL("UPDATE Files " + \ + "SET removedTime='" + utcDate + "', currentLocation=NULL " + \ + "WHERE fileUUID='" + fileUUID + "'" ) + +def createSIP(path, UUID=None): + if UUID == None: + UUID = uuid.uuid4().__str__() + print "Creating SIP:", UUID, "-", path + sql = """INSERT INTO SIPs (sipUUID, currentPath) + VALUES ('""" + UUID + databaseInterface.separator + escapeForDB(path) + "');" + databaseInterface.runSQL(sql) + return UUID + +def deUnicode(str): + if str == None: + return None + return unicode(str).encode('utf-8') diff --git a/src/archivematicaCommon/lib/databaseInterface.py b/src/archivematicaCommon/lib/databaseInterface.py new file mode 100755 index 0000000000..cc1493606e --- /dev/null +++ 
b/src/archivematicaCommon/lib/databaseInterface.py @@ -0,0 +1,194 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. + +# @package Archivematica +# @subpackage archivematicaCommon +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ + +import MySQLdb +import os +import threading +import string +import sys +import time +from datetime import datetime + +global separator +separator = "', '" +printSQL = False + +#DB_CONNECTION_OPTS = dict(db="MCP", read_default_file="/etc/archivematica/archivematicaCommon/dbsettings") +DB_CONNECTION_OPTS = dict(db="MCP", read_default_file="/etc/archivematica/archivematicaCommon/dbsettings", charset="utf8", use_unicode = True) + +def reconnect(): + global database + retryAttempts = 3 + secondsBetweenRetry = 10 + for a in range(retryAttempts): + try: + database=MySQLdb.connect(**DB_CONNECTION_OPTS) + database.autocommit(0) + break + except Exception as inst: + print >>sys.stderr, "Error connecting to database:" + print >>sys.stderr, type(inst) # the exception instance + print >>sys.stderr, inst.args + time.sleep(secondsBetweenRetry) + if a+1 == retryAttempts: + raise Exception(inst) + +def getSeparator(): + global separator + return separator + +def getUTCDate(): + """Returns a string 
of the UTC date & time in ISO format""" + d = datetime.utcnow() + return d.isoformat('T') + +def getDeciDate(date): + valid = "." + string.digits + ret = "" + for c in date: + if c in valid: + ret += c + #else: + #ret += replacementChar + return ret + + +#sudo apt-get install python-mysqldb +sqlLock = threading.Lock() +sqlLock.acquire() +global database +reconnect() +sqlLock.release() + +def runSQL(sql): + global database + if printSQL: + print sql + if isinstance(sql, unicode): + sql = sql.encode('utf-8') + #print type(sql), sql + #found that even though it says it's compiled thread safe, running it multi-threaded crashes it. + sqlLock.acquire() + db = database + try: + #db.query(sql) + c=database.cursor() + c.execute(sql) + rows = c.fetchall() + except MySQLdb.OperationalError, message: + #errorMessage = "Error %d:\n%s" % (message[ 0 ], message[ 1 ] ) + if message[0] == 2006 and message[1] == 'MySQL server has gone away': + reconnect() + sqlLock.release() + runSQL(sql) + return + else: + print >>sys.stderr, "Error with query: ", sql + print >>sys.stderr, "Error %d:\n%s" % (message[ 0 ], message[ 1 ] ) + sqlLock.release() + exit(-100) + except Exception as inst: + print >>sys.stderr, "Error query: ", sql + print >>sys.stderr, type(inst) # the exception instance + print >>sys.stderr, inst.args + sqlLock.release() + raise Exception(inst) + db.commit() + sqlLock.release() + return + + + + +def querySQL(sql): + global database + if printSQL: + print sql + if isinstance(sql, unicode): + sql = sql.encode('utf-8') + sqlLock.acquire() + try: + c=database.cursor() + c.execute(sql) + except MySQLdb.OperationalError, message: + #errorMessage = "Error %d:\n%s" % (message[ 0 ], message[ 1 ] ) + if message[0] == 2006 and message[1] == 'MySQL server has gone away': + reconnect() + import time + time.sleep(10) + c=database.cursor() + c.execute(sql) + else: + print >>sys.stderr, "Error with query: ", sql + print >>sys.stderr, "Error %d:\n%s" % (message[ 0 ], message[ 1 ] ) + 
except Exception as inst: + print >>sys.stderr, "Error query: ", sql + print >>sys.stderr, type(inst) # the exception instance + print >>sys.stderr, inst.args + sqlLock.release() + raise Exception(inst) + return c, sqlLock +# row = c.fetchone() +# while row != None: +# fileUUID = row[0] +# filesToChecksum.append(row[0]) +# row = c.fetchone() + + +def queryAllSQL(sql): + global database + if printSQL: + print sql + if isinstance(sql, unicode): + sql = sql.encode('utf-8') + sqlLock.acquire() + #print sql + rows = [] + try: + c=database.cursor() + c.execute(sql) + rows = c.fetchall() + sqlLock.release() + except MySQLdb.OperationalError, message: + #errorMessage = "Error %d:\n%s" % (message[ 0 ], message[ 1 ] ) + if message[0] == 2006 and message[1] == 'MySQL server has gone away': + reconnect() + import time + time.sleep(10) + c=database.cursor() + c.execute(sql) + rows = c.fetchall() + sqlLock.release() + else: + print >>sys.stderr, "Error with query: ", sql + print >>sys.stderr, "Error %d:\n%s" % (message[ 0 ], message[ 1 ] ) + sqlLock.release() + exit(-100) + except Exception as inst: + print >>sys.stderr, "Error query: ", sql + print >>sys.stderr, type(inst) # the exception instance + print >>sys.stderr, inst.args + sqlLock.release() + raise Exception(inst) + return rows diff --git a/src/archivematicaCommon/lib/elasticSearchFunctions.py b/src/archivematicaCommon/lib/elasticSearchFunctions.py new file mode 100755 index 0000000000..be41509346 --- /dev/null +++ b/src/archivematicaCommon/lib/elasticSearchFunctions.py @@ -0,0 +1,234 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. + +# @package Archivematica +# @subpackage archivematicaCommon +# @author Mike Cantelon <mike@artefactual.com> +# @version svn: $Id$ + +import time +import os +import sys +import MySQLdb +import cPickle +import base64 +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +import databaseInterface +sys.path.append("/usr/lib/archivematica/archivematicaCommon/externals") +import pyes +import xmltodict +import xml.etree.ElementTree as ElementTree + +pathToElasticSearchServerFile='/etc/elasticsearch/elasticsearch.yml' + +def connect_and_index(index, type, uuid, pathToArchive): + + exitCode = 0 + + # make sure elasticsearch is installed + if (os.path.exists(pathToElasticSearchServerFile)): + + # make sure transfer files exist + if (os.path.exists(pathToArchive)): + conn = pyes.ES('127.0.0.1:9200') + try: + conn.create_index(index) + except pyes.exceptions.IndexAlreadyExistsException: + pass + + # use METS file if indexing an AIP + metsFilePath = os.path.join(pathToArchive, 'METS.' 
+ uuid + '.xml') + + if os.path.isfile(metsFilePath): + filesIndexed = index_mets_file_metadata( + conn, + uuid, + metsFilePath, + index, + type + ) + + else: + filesIndexed = index_directory_files( + conn, + uuid, + pathToArchive, + index, + type + ) + + print type + ' UUID: ' + uuid + print 'Files indexed: ' + str(filesIndexed) + + else: + print >>sys.stderr, "Directory does not exist: ", pathToArchive + exitCode = 1 + else: + print >>sys.stderr, "Elasticsearch not found, normally installed at ", pathToElasticSearchServerFile + exitCode = 1 + + return exitCode + +def index_mets_file_metadata(conn, uuid, metsFilePath, index, type): + filesIndexed = 0 + filePathAmdIDs = {} + filePathMetsData = {} + + # establish structure to be indexed for each file item + fileData = { + 'archivematicaVersion': '0.9', + 'AIPUUID': uuid, + 'indexedAt': time.time(), + 'filePath': '', + 'METS': { + 'dmdSec': {}, + 'amdSec': {} + } + } + dmdSecData = {} + + # parse XML + tree = ElementTree.parse(metsFilePath) + root = tree.getroot() + + # get SIP-wide dmdSec + dmdSec = root.findall("{http://www.loc.gov/METS/}dmdSec/{http://www.loc.gov/METS/}mdWrap/{http://www.loc.gov/METS/}xmlData") + for item in dmdSec: + xml = ElementTree.tostring(item) + dmdSecData = xmltodict.parse(xml) + + # get amdSec IDs for each filepath + for item in root.findall("{http://www.loc.gov/METS/}fileSec/{http://www.loc.gov/METS/}fileGrp[@USE='original']/{http://www.loc.gov/METS/}file"): + for item2 in item.findall("{http://www.loc.gov/METS/}FLocat"): + filePath = item2.attrib['{http://www.w3.org/1999/xlink}href'] + filePathAmdIDs[filePath] = item.attrib['ADMID'] + + # for each filepath, get data and convert to dictionary then index everything in the appropriate amdSec element + for filePath in filePathAmdIDs: + filesIndexed = filesIndexed + 1 + items = root.findall("{http://www.loc.gov/METS/}amdSec[@ID='" + filePathAmdIDs[filePath] + "']") + for item in items: + if item != None: + xml = ElementTree.tostring(item) + 
+ # set up data for indexing + indexData = fileData + indexData['filePath'] = filePath + indexData['METS']['dmdSec'] = rename_dict_keys_with_child_dicts(normalize_dict_values(dmdSecData)) + indexData['METS']['amdSec'] = rename_dict_keys_with_child_dicts(normalize_dict_values(xmltodict.parse(xml))) + + # index data + result = conn.index(indexData, index, type) + + backup_indexed_document(result, indexData, index, type) + + print 'Indexed AIP files and corresponding METS XML.' + + return filesIndexed + +# To avoid Elasticsearch schema collisions, if a dict value is itself a +# dict then rename the dict key to differentiate it from similar instances +# where the same key has a different value type. +# +def rename_dict_keys_with_child_dicts(data): + new = {} + for key in data: + if type(data[key]) is dict: + new[key + '_data'] = rename_dict_keys_with_child_dicts(data[key]) + elif type(data[key]) is list: + new[key + '_list'] = rename_list_elements_if_they_are_dicts(data[key]) + else: + new[key] = data[key] + return new + +def rename_list_elements_if_they_are_dicts(list): + for index, value in enumerate(list): + if type(value) is list: + list[index] = rename_list_elements_if_they_are_dicts(value) + elif type(value) is dict: + list[index] = rename_dict_keys_with_child_dicts(value) + return list + +# Because an XML document node may include one or more children, conversion +# to a dict can result in the converted child being one of two types. +# this causes problems in an Elasticsearch index as it expects consistant +# types to be indexed. +# The below function recurses a dict and if a dict's value is another dict, +# it encases it in a list. 
+# +def normalize_dict_values(data): + for key in data: + if type(data[key]) is dict: + data[key] = [normalize_dict_values(data[key])] + elif type(data[key]) is list: + data[key] = normalize_list_dict_elements(data[key]) + return data + +def normalize_list_dict_elements(list): + for index, value in enumerate(list): + if type(value) is list: + list[index] = normalize_list_dict_elements(value) + elif type(value) is dict: + list[index] = normalize_dict_values(value) + return list + +def index_directory_files(conn, uuid, pathToTransfer, index, type): + filesIndexed = 0 + + # document structure + transferData = { + 'uuid': uuid, + 'created': time.time() + } + + # compile file data (relative filepath, extension, size) + fileData = {} + for filepath in list_files_in_dir(pathToTransfer): + if os.path.isfile(filepath): + fileData[filepath] = { + 'basename': os.path.basename(filepath) + } + filesIndexed = filesIndexed + 1 + + transferData['filepaths'] = fileData + + # add document to index + conn.index(transferData, index, type) + + return filesIndexed + +def list_files_in_dir(path, filepaths=[]): + # define entries + for file in os.listdir(path): + child_path = os.path.join(path, file) + filepaths.append(child_path) + + # if entry is a directory, recurse + if os.path.isdir(child_path) and os.access(child_path, os.R_OK): + list_files_in_dir(child_path, filepaths) + + # return fully traversed data + return filepaths + +def backup_indexed_document(result, indexData, index, type): + sql = "INSERT INTO ElasticsearchIndexBackup (docId, data, indexName, typeName) VALUES ('%s', '%s', '%s', '%s')" + + sql = sql % (MySQLdb.escape_string(result['_id']), unicode(base64.encodestring(cPickle.dumps(indexData))), MySQLdb.escape_string(index), MySQLdb.escape_string(type)) + + databaseInterface.runSQL(sql) diff --git a/src/archivematicaCommon/lib/executeOrRun.py b/src/archivematicaCommon/lib/executeOrRun.py new file mode 100644 index 0000000000..f1b58cdaf9 --- /dev/null +++ 
b/src/archivematicaCommon/lib/executeOrRun.py @@ -0,0 +1,99 @@ +#!/usr/bin/python -OO +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. + +# @package Archivematica +# @subpackage archivematicaCommon +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ + +import shlex +import uuid +import os +import threading +from twisted.internet import protocol as twistedProtocol +from twisted.internet import reactor + +import re + +class twistedLaunchSubProcess(twistedProtocol.ProcessProtocol): + def __init__(self, doneLock, stdIn="", printing=True): + self.stdIn = stdIn + self.stdOut = "" + self.stdError = "" + self.doneLock = doneLock + self.exitCode = None + self.printing = printing + + def connectionMade(self): + if self.stdIn: + self.transport.write(self.stdIn) + self.transport.closeStdin() # tell them we're done + def outReceived(self, stdOut): + if self.printing: + print stdOut + self.stdOut = self.stdOut + stdOut + + def errReceived(self, stdError): + if self.printing: + print stdError + self.stdError = self.stdError + stdError + + def processEnded(self, reason): + self.exitCode = reason.value.exitCode + self.doneLock.release() + + +def launchSubProcess(command, stdIn="", printing=True): + doneLock = threading.Lock() + doneLock.acquire() + tsp = 
twistedLaunchSubProcess(doneLock, stdIn, printing) + commands = shlex.split(command) + reactor.spawnProcess(tsp, commands[0], commands, {}) + if not reactor._started: + reactor.run() + doneLock.acquire() + return tsp.exitCode, tsp.stdOut, tsp.stdError + + + +def createAndRunScript(text, stdIn="", printing=True): + #output the text to a /tmp/ file + scriptPath = "/tmp/" + uuid.uuid4().__str__() + FILE = os.open(scriptPath, os.O_WRONLY | os.O_CREAT, 0770) + os.write(FILE, text) + os.close(FILE) + + #run it + ret = launchSubProcess(scriptPath, stdIn="", printing=True) + + #remove the temp file + os.remove(scriptPath) + + return ret + + + +def executeOrRun(type, text, stdIn="", printing=True): + if type == "command": + return launchSubProcess(text, stdIn=stdIn, printing=printing) + if type == "bashScript": + text = "#!/bin/bash\n" + text + return createAndRunScript(text, stdIn=stdIn, printing=printing) + if type == "pythonScript": + text = "#!/usr/bin/python -OO\n" + text + return createAndRunScript(text, stdIn=stdIn, printing=printing) diff --git a/src/archivematicaCommon/lib/executeOrRunSubProcess.py b/src/archivematicaCommon/lib/executeOrRunSubProcess.py new file mode 100644 index 0000000000..1ab7626b6a --- /dev/null +++ b/src/archivematicaCommon/lib/executeOrRunSubProcess.py @@ -0,0 +1,79 @@ +#!/usr/bin/python -OO +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see <http://www.gnu.org/licenses/>. + +# @package Archivematica +# @subpackage archivematicaCommon +# @author Joseph Perry <joseph@artefactual.com> +# @version svn: $Id$ + +import subprocess +import shlex +import uuid +import os +import sys + +def launchSubProcess(command, stdIn="", printing=True): + stdError = "" + stdOut = "" + #print >>sys.stderr, command + try: + p = subprocess.Popen(shlex.split(command), stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE) + stdOut, stdError = p.communicate(input=stdIn) + #append the output to stderror and stdout + if printing: + print stdOut + print >>sys.stderr, stdError + retcode = p.returncode + except OSError, ose: + print >>sys.stderr, "Execution failed:", ose + return -1, "Config Error!", ose.__str__() + except Exception as inst: + print >>sys.stderr, "Execution failed:", command + print >>sys.stderr, type(inst) # the exception instance + print >>sys.stderr, inst.args + return -1, "Execution failed:", command + return retcode, stdOut, stdError + + + +def createAndRunScript(text, stdIn="", printing=True): + #output the text to a /tmp/ file + scriptPath = "/tmp/" + uuid.uuid4().__str__() + FILE = os.open(scriptPath, os.O_WRONLY | os.O_CREAT, 0770) + os.write(FILE, text) + os.close(FILE) + + #run it + ret = launchSubProcess(scriptPath, stdIn="", printing=True) + + #remove the temp file + os.remove(scriptPath) + + return ret + + + +def executeOrRun(type, text, stdIn="", printing=True): + if type == "command": + return launchSubProcess(text, stdIn=stdIn, printing=printing) + if type == "bashScript": + text = "#!/bin/bash\n" + text + return createAndRunScript(text, stdIn=stdIn, printing=printing) + if type == "pythonScript": + text = "#!/usr/bin/python -OO\n" + text + return createAndRunScript(text, stdIn=stdIn, printing=printing) diff --git a/src/archivematicaCommon/lib/externals/__init__.py 
b/src/archivematicaCommon/lib/externals/__init__.py new file mode 100755 index 0000000000..e69de29bb2 diff --git a/src/archivematicaCommon/lib/externals/checksummingTools.py b/src/archivematicaCommon/lib/externals/checksummingTools.py new file mode 100755 index 0000000000..f7fd009994 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/checksummingTools.py @@ -0,0 +1,33 @@ +#!/usr/bin/python -OO +import hashlib + +#Borrowed from http://stackoverflow.com/questions/1131220/get-md5-hash-of-a-files-without-open-it-in-python +def md5_for_file(fileName, block_size=2**20): + f = open(fileName) + md5 = hashlib.md5() + while True: + data = f.read(block_size) + if not data: + break + md5.update(data) + #return md5.digest() + return md5.hexdigest() + +#Modification of above borrowed function +def sha_for_file(fileName, block_size=2**20): + f = open(fileName) + sha = hashlib.sha256() + while True: + data = f.read(block_size) + if not data: + break + sha.update(data) + #return sha.digest() + return sha.hexdigest() + + +if __name__ == '__main__': + import sys + theFile = sys.argv[1] + print "md5:", md5_for_file(theFile) + print "sha256:", sha_for_file(theFile) diff --git a/src/archivematicaCommon/lib/externals/detectCores.py b/src/archivematicaCommon/lib/externals/detectCores.py new file mode 100755 index 0000000000..6811719db4 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/detectCores.py @@ -0,0 +1,26 @@ +#!/usr/bin/python -OO +#Author Bruce Eckel (www.BruceEckel.com) +#Source http://www.artima.com/weblogs/viewpost.jsp?thread=230001 + +import os + +def detectCPUs(): + """ + Detects the number of CPUs on a system. Cribbed from pp. 
+ """ + # Linux, Unix and MacOS: + if hasattr(os, "sysconf"): + if os.sysconf_names.has_key("SC_NPROCESSORS_ONLN"): # Linux & Unix: + ncpus = os.sysconf("SC_NPROCESSORS_ONLN") + if isinstance(ncpus, int) and ncpus > 0: + return ncpus + else: # OSX: + return int(os.popen2("sysctl -n hw.ncpu")[1].read()) # Windows: + if os.environ.has_key("NUMBER_OF_PROCESSORS"): + ncpus = int(os.environ["NUMBER_OF_PROCESSORS"]); + if ncpus > 0: + return ncpus + return 1 # Default + +if __name__ == '__main__': + print detectCPUs() diff --git a/src/archivematicaCommon/lib/externals/extractMaildirAttachments.py b/src/archivematicaCommon/lib/externals/extractMaildirAttachments.py new file mode 100755 index 0000000000..3923c0240d --- /dev/null +++ b/src/archivematicaCommon/lib/externals/extractMaildirAttachments.py @@ -0,0 +1,124 @@ +#!/usr/bin/python -OO +# vim:fileencoding=utf8 + +#Author Ian Lewis +#http://www.ianlewis.org/en/parsing-email-attachments-python + + +# Modification +# Author Joseph Perry +# date Aug 10 2010 +# Using rfc6266 library + +from email.Header import decode_header +import email +from base64 import b64decode +import sys +from email.Parser import Parser as EmailParser +from email.utils import parseaddr +# cStringIOã¯ãƒ€ãƒ¡ +from StringIO import StringIO +from rfc6266 import parse_headers #TODO: add notes + +class NotSupportedMailFormat(Exception): + pass + +def parse_attachment(message_part, attachments=None): + content_disposition = message_part.get("Content-Disposition", None) + if content_disposition: + try: + cd = parse_headers(content_disposition, relaxed=True) + if cd.disposition.lower() == "attachment": + if not cd.assocs.has_key("filename"): + #print error or warning? 
+ return None + else: + file_data = message_part.get_payload(decode=True) + if not file_data: + payload = message_part.get_payload() + if isinstance(payload, list): + for msgobj in payload: + parse2(msgobj, attachments) + return None + print >>sys.stderr, message_part.get_payload() + print >>sys.stderr, message_part.get_content_charset() + attachment = StringIO(file_data) + attachment.content_type = message_part.get_content_type() + attachment.size = len(file_data) + attachment.name = cd.assocs['filename'] + attachment.create_date = None + attachment.mod_date = None + attachment.read_date = None + + for name, value in cd.assocs.iteritems(): + if name == "create-date": + attachment.create_date = value #TODO: datetime + elif name == "modification-date": + attachment.mod_date = value #TODO: datetime + elif name == "read-date": + attachment.read_date = value #TODO: datetime + + return attachment + + except: + print >>sys.stderr, "content_disposition:", content_disposition + raise + return None + +def parse(content): + """ + Eメールã®ã‚³ãƒ³ãƒ†ãƒ³ãƒ„ã‚’å—ã‘å–ã‚Šparse,encodeã—ã¦è¿”ã™ + """ + p = EmailParser() + msgobj = p.parse(content) + attachments = [] + return parse2(msgobj, attachments) + +def parse2(msgobj, attachments=None): + if msgobj['Subject'] is not None: + decodefrag = decode_header(msgobj['Subject']) + subj_fragments = [] + for s , enc in decodefrag: + if enc: + s = s.decode(enc) + subj_fragments.append(s) + subject = ''.join(subj_fragments) + else: + subject = None + + if attachments == None: + attachments = [] + body = None + html = None + for part in msgobj.walk(): + attachment = parse_attachment(part, attachments=attachments) + if attachment: + attachments.append(attachment) + elif part.get_content_type() == "text/plain": + if body is None: + body = "" + payload = part.get_payload() + encoding = part.get_content_charset() + if encoding: + encoding = encoding.replace("windows-874", "cp874") + payload = payload.decode(encoding, 'replace') + body += payload + 
elif part.get_content_type() == "text/html": + if html is None: + html = "" + payload = part.get_payload() + encoding = part.get_content_charset() + if encoding: + encoding = encoding.replace("windows-874", "cp874") + payload = payload.decode(encoding, 'replace') + html += payload + return { + 'subject' : subject, + 'body' : body, + 'html' : html, + 'from' : parseaddr(msgobj.get('From'))[1], # åå‰ã¯é™¤ã„ã¦ãƒ¡ãƒ¼ãƒ«ã‚¢ãƒ‰ãƒ¬ã‚¹ã®ã¿æŠ½å‡º + 'to' : parseaddr(msgobj.get('To'))[1], # åå‰ã¯é™¤ã„ã¦ãƒ¡ãƒ¼ãƒ«ã‚¢ãƒ‰ãƒ¬ã‚¹ã®ã¿æŠ½å‡º + 'attachments': attachments, + 'msgobj': msgobj, + } + diff --git a/src/archivematicaCommon/lib/externals/maildirToMbox.py b/src/archivematicaCommon/lib/externals/maildirToMbox.py new file mode 100755 index 0000000000..c6757351bc --- /dev/null +++ b/src/archivematicaCommon/lib/externals/maildirToMbox.py @@ -0,0 +1,83 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# source: https://gist.github.com/1709069 +# author/owner: nyergler github gist +""" +Frédéric Grosshans, 19 January 2012 +Nathan R. Yergler, 6 June 2010 + +This file does not contain sufficient creative expression to invoke +assertion of copyright. No warranty is expressed or implied; use at +your own risk. + +--- + +Uses Python's included mailbox library to convert mail archives from +maildir [http://en.wikipedia.org/wiki/Maildir] to +mbox [http://en.wikipedia.org/wiki/Mbox] format, icluding subfolder. + +See http://docs.python.org/library/mailbox.html#mailbox.Mailbox for +full documentation on this library. + +--- + +To run, save as md2mb.py and run: + +$ python md2mb.py [maildir_path] [mbox_filename] + +[maildir_path] should be the the path to the actual maildir (containing new, +cur, tmp, and the subfolders, which are hidden directories with names like +.subfolde.subsubfolder.subsubsbfolder); + +[mbox_filename] will be newly created, as well as a [mbox_filename].sbd the +directory. 
+""" + +import mailbox +import sys +import email +import os + +def maildir2mailbox(maildirname, mboxfilename): + """ + slightly adapted from maildir2mbox.py, + Nathan R. Yergler, 6 June 2010 + http://yergler.net/blog/2010/06/06/batteries-included-or-maildir-to-mbox-again/ + + + """ + # open the existing maildir and the target mbox file + maildir = mailbox.Maildir(maildirname, email.message_from_file) + mbox = mailbox.mbox(mboxfilename) + + # lock the mbox + mbox.lock() + + # iterate over messages in the maildir and add to the mbox + for msg in maildir: + mbox.add(msg) + + # close and unlock + mbox.close() + maildir.close() + +def maildir2mailbox2(dirname, mboxname): + mboxdirname=mboxname+'.sbd' + maildir2mailbox(dirname,mboxname) + #if not os.path.exists(mboxdirname): os.makedirs(mboxdirname) + + listofdirs=[dn for dn in os.walk(dirname).next()[1] if dn not in ['new', 'cur', 'tmp']] + for curfold in listofdirs: + curlist=[mboxname]+curfold.split('.') + curpath=os.path.join(*[dn+'.sbd' for dn in curlist if dn]) + if not os.path.exists(curpath): os.makedirs(curpath) + print '| ' +curfold +' -> '+curpath[:-4] + maildir2mailbox(os.path.join(dirname,curfold),curpath[:-4]) + + +if __name__ == "__main__": + dirname=sys.argv[-2] + mboxname=sys.argv[-1] + print(dirname + ' -> ' +mboxname) + maildir2mailbox2(dirname, mboxname) + print('Done') diff --git a/src/archivematicaCommon/lib/externals/mets/mets.xsd b/src/archivematicaCommon/lib/externals/mets/mets.xsd new file mode 100644 index 0000000000..fd25a6c4f6 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/mets/mets.xsd @@ -0,0 +1,1694 @@ +<?xml version="1.0" encoding="UTF-8"?> +<!-- METS: Metadata Encoding and Transmission Standard --> +<!-- Copyright © 2001, 2002, 2003, 2004, 2005, 2006, 2008, 2009, 2010, 2012 Digital Library Federation --> +<!-- Prepared for the Digital Library Federation by Jerome McDonough, New York University, +with the assistance of Michael Alexander (British Library), Joachim Bauer (Content 
Conversion Specialists, Germany), Rick Beaubien (University of California), Terry Catapano (Columbia University), Morgan Cundiff (Library of Congress), Susan Dahl (University of Alberta), Markus Enders (State and University Library, Göttingen/British Library), Richard Gartner (Bodleian Library at Oxford/King's College, London), Thomas Habing (University of Illinois at Urbana-Champaign), Nancy Hoebelheinrich (Stanford University/Knowledge Motifs LLC), Arwen Hutt (U.C. San Diego), Mark Kornbluh (Michigan State University), Cecilia Preston (Preston & Lynch), Merrilee Proffitt (Research Libraries Group), Clay Redding (Library of Congress), Jenn Riley (Indiana University), Richard Rinehart (Berkeley Art Museum/Pacific Film Archive), Mackenzie Smith (Massachusetts Institute of Technology), Tobias Steinke (German National Library), Taylor Surface (OCLC), Brian Tingle (California Digital Library) and Robin Wendler (Harvard University), Robert Wolfe (Massachusetts Institute of Technology), Patrick Yott (Brown University). +--> +<!-- March, 2012 --> +<!-- Version 1.9.1 --> +<!-- Change History --> +<!-- April 23, 2001: Alpha Draft completed --> +<!-- June 7, 2001: Beta completed --> +<!-- 6/7/2001 Beta Changes: + 1. add 'Time' as a possible time code value, as well as TCF. + 2. Make dmdSec ID attribute required; make ID attribute optional on MDRef/MDWrap. + 3. Add 'Label' attribute to StructMap, along with 'Type'. + 4. Add DDI and FGDC as potential metadata schemes to enumeration. + 5. Enable an "otherMDtype" attribute for MDWrap/MDRef and any other element where + there's an 'other' in the enumerated possibilities. + 6. Add a "profile" attribute to METS element. + 7. Revised mptr declaration so that it's like FLocat/MDRef (and not like XLink) + 8. Extend internal documentation of <area> attributes. + 9. Add "other" to the possible set of LOCTYPEs. + 10. Change ADMIDS to ADMID on FileGrp. + 11. Change "N" to "Order" on <div> element. + 12. 
Change "Number" to "order label" on <div> element + 13. Add createdate and lastmoddate attributes to mets element. + 14. Allow <div> and <area> elements to link to administrative metadata sections. + 15. Normalize attribute pointing facilities for file element and mdRef. + 16. Provide a LOCTYPE of "other" and an "otherloctype" attribute for pointing to external files. + 17. Drop PDI from enumeration of LOCTYPES. + 18. Make MDTYPE required in mdRef and mdWrap. + 19. Rename preservationMD to digiprovMD. + 20. Add optional CHECKSUM attribute to FContent element. + 21. Modularize declarations of fileGrpType and mdSecType attributes and enumerations to + simplify maintenance. + 22. Add TYPE attribute to structMap. + 23. Declare structMap element using structMapType rather than direct declaration. + 24. Add area element as possible subelement to <div>, along with par and seq. + 25. Change mdSec model to ALL, to enable differing order of mdRef/mdWrap elements. + 26. Extend documentation on <par> and <seq> elements. + --> +<!-- October 22, 2001: Gamma completed --> +<!-- 10/22/2001 Gamma changes: + 1. Added optional fileSec element beneath METS root element to contain fileGrps. + 2. Created subsidiary schema file xlink.xsd for XLink attributes, restored XLink attributes + to mptr element, and added XLink support to mdRef and FLocat. + 3. Created new element metsHdr to handle metadata regarding METS document + itself (analogous to TEI Header). Moved CREATEDATE and LASTMODDATE attributes + to metsHdr, and added new RECORDSTATUS attribute. Added new subsidiary elements + agent and altRecordID to metsHdr. + 4. Made CREATEDATE and LASTMODDATE attributes type xsd:dateTime to allow more precise + recording of when work was done. + 5. Changed all attributes using data type of xsd:binary to xsd:base64Binary to conform to final + W3C schema recommendations. + 6. Cleaned up annotations/documentation. 
+ --> +<!-- December 19, 2001: Epsilon and PROTOFINAL completed--> +<!-- 12/19/2001 Epsilon changes: + 1. Changed sequence operator for StructMap so that only 1 root div element is permitted. + 2. Add new roles to agent element's role attribute and support for extensible 'other' role. + 3. Add support for extensible 'other' type attribute on agent element. + 4. Yet more documentation clean up. + 5. Relocate CHECKSUM attribute from FContent to File element. + 6. Change the file element's CREATED attribute and fileGroup's VERSDATE attribute to + a type of xsd:dateTime + 7. Change attribute name DMD for div element to DMDID for consistency's sake. + 8. Added new behaviorSec for support of referencing executable code from METS object + --> +<!-- February 8, 2002: Zeta bug fix to final --> +<!-- 2/8/2002 Zeta changes: + + 1. Eliminated redundant VRA in metadata type enumeration. + 2. Changed mdWrap content model, adding xmlData element to eliminate + ambiguous content model + --> +<!-- June 3, 2002: Version 1.1 --> +<!-- 6/3/2002 v1.1 changes: + + 1. Add new structLink section for recording hyperlinks between media represented by structMap nodes. + 2. Allow a <par> element to + contain a <seq> --> +<!-- Dec. 27, 2002: Version 1.2 --> +<!-- 12/27/2002 v1.2 changes: +1. Add “USE†attribute to FileGrp, File, FLocat and FContent; +2. Make FLocat repeatable; +3. Have FContent mimic mdWrap in using separate binData/xmlData sections; +4. Copyright statement added; +5. Allow both FLocat and Fcontent in single file element; +6. Allow behaviorSec elements to group through GROUPID attribute; +7. allow descriptive and administrative metadata sections to be grouped through GROUPID attribute; +8. allow <file> element to point to descriptive metadata via DMDID attribute; +9. allow descriptive metadata and all forms of administrative metadata to point to administrative metadata via ADMID attribute; +10. CREATED and STATUS attributes added to all desc. and adm. 
metadata sections; and +11. clean up documentation in elements to reflect reality. +--> +<!-- May 8, 2003: Version 1.3 --> +<!-- 05/05/2003 v1.3 changes: + +1. Change “2. OBJID: a primary identifier assigned to the original source document†to “2. OBJID: a primary identifier assigned to the METS object.†+2. Add MODS to MDTYPEs. +3. Modify <file> attributes so that instead of just CHECKSUM we have CHECKSUM and CHECKSUMTYPE, where CHECKSUMTYPE is a controlled vocabulary as follows: + HAVAL, MD5, SHA-1, SHA-256, SHA-384, SHA-512, TIGER, WHIRLPOOL +4.Alter BehaviorSec to make it recursive, and add a new behavior element to wrap mechanism and interfaceDef elements. +--> +<!-- May 1, 2004: Version 1.4 --> +<!-- 05/01/2003 v1.4 changes: + +1. Moved attribute documentation out of element documentation +(thank you, Brian Tingle). +2. New CONTENTIDS attribute (and URIs simpleType) added to div, fptr, +mptr and area elements for mapping MPEG21 DII Identifier values +3. XLink namespace URI changed to conform with XLink recommendation. +4. ID Attribute added to FContent. +5. ID Attribute addedt to structLink. +6. ID Attribute added to smLink. +7. "LOM" added as metadata type. + --> + <!-- April 12, 2005: Version 1.5 --> + <!-- 04/12/2005 v1.5 changes: + + 1. Made file element recursive to deal with PREMIS Onion Layer model and + support XFDU-ish unpacking specification. + 2. Add <stream> element beneath <file> to allow linking of metadata to + subfile structures. + 3. Modify structLink TO and FROM attributes to put them in XLink namespace. + 4. Make processContents "lax" for all xsd:any elements. + --> + <!-- October 18, 2006: Version 1.6 --> + <!-- 10/18/2006 v1.6 changes: + + 1. add ID to stream and transformFile + 2. add ADMID to metsHdr + 3. make smLink/@xlink:to and smLink/@xlink:from required + --> +<!-- October 16, 2007/ Jan 20, 2008: Version 1.7 --> +<!-- 10/16/2007 01/30/2008 v 1.7 changes: + +1. create parType complex type to allow a seq to contain a par +2. 
create FILECORE attribute group with MIMETYPE, SIZE, CHECKSUM, CHECKSUMTYPE; + change fileType, mdWrapType and mdRefType use the attribute group, so mdType and mdRef end + up with new SIZE, CHECKSUM, and CHECKSUMTYPE attributes (file does not change) +20080130 +2a. CREATED added to FILECORE +3. PREMIS:OBJECT PREMIS:AGENT PREMIS:RIGHTS PREMIS:EVENT added to MDTYPE value enumeration +--> +<!-- April 2009: Version 1.8 --> +<!-- Version 1.8 changes: + 1. Add CRC32, Adler-32, MNP to the enumerated values constraining CHECKSUMTYPE to align with MIX messageDigestAlgorithm constraints. + 2. Add TEXTMD and METSRIGHTS to the enumeration values constraining MDTYPE. + 3. Add an MDTYPEVERSION attribute as a companion to the MDTYPE attribute in the mdRef and mdWrap elements. + 4. ID and STRUCTID attributes on the behavior element made optional. Depending on whether the behavior applies to a transformFile element or div elements in the structMap, only one or the other of the attributes would pertain. + 5. Documentation aligned with the METS Primer, and corrected. + 6. xml:lang="en" atttribute value added to every <documentation> element + 7. xlink:extendedLink support added to the <structLink> element by means of a new <smLinkGrp> element, and its child <smLocatorLink> and <smArcLink> elements. +--> +<!--February 2010: Version 1.9--> +<!--Version 1.9 Changes: + 1. Added a <metsDocumentID> element to the <metsHdr> for recording a unique identifier for the METS document itself where this is different from the OBJID, the identifier for the entire digital object represented by the METS document. + 2. Added "ISO 19115:2003 NAP" to the enumerated values for the MDTYPE attribute in the METADATA attribute group. + 3. Added "XPTR" to the enumerated values for the BETYPE attribute on the areaType data type + 4. Added BEGIN, END and BETYPE attributes to the <file> and <stream> elements for specifying the location of a nested file or a stream within it's parent file. 
+--> +<!-- March 2012: Version 1.9.1 --> +<!-- Version 1.9.1 Changes:: + 1. Added 'EAC-CPF' as potential metadata scheme to MDTYPE enumeration + EAC-CPF = Encoded Archival Context - Corporate Bodies, Persons, and Families + http://eac.staatsbibliothek-berlin.de/eac-cpf-schema.html +--> +<xsd:schema targetNamespace="http://www.loc.gov/METS/" xmlns="http://www.loc.gov/METS/" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xsd="http://www.w3.org/2001/XMLSchema" elementFormDefault="qualified" attributeFormDefault="unqualified"> + <xsd:import namespace="http://www.w3.org/1999/xlink" schemaLocation="http://www.loc.gov/standards/xlink/xlink.xsd"/> + + <xsd:element name="mets"> + <xsd:annotation> + <xsd:documentation xml:lang="en">METS: Metadata Encoding and Transmission Standard. + METS is intended to provide a standardized XML format for transmission of complex digital library objects between systems. As such, it can be seen as filling a role similar to that defined for the Submission Information Package (SIP), Archival Information Package (AIP) and Dissemination Information Package (DIP) in the Reference Model for an Open Archival Information System. The root element <mets> establishes the container for the information being stored and/or transmitted by the standard. + </xsd:documentation> + </xsd:annotation> + <xsd:complexType> + <xsd:complexContent> + <xsd:extension base="metsType"/> + </xsd:complexContent> + </xsd:complexType> + </xsd:element> + <xsd:complexType name="metsType"> + <xsd:annotation> + <xsd:documentation xml:lang="en">metsType: Complex Type for METS Sections + A METS document consists of seven possible subsidiary sections: metsHdr (METS document header), dmdSec (descriptive metadata section), amdSec (administrative metadata section), fileGrp (file inventory group), structLink (structural map linking), structMap (structural map) and behaviorSec (behaviors section). 
+ </xsd:documentation> + </xsd:annotation> + <xsd:sequence> + <xsd:element name="metsHdr" minOccurs="0"> + <xsd:annotation> + <xsd:documentation xml:lang="en"> + The mets header element <metsHdr> captures metadata about the METS document itself, not the digital object the METS document encodes. Although it records a more limited set of metadata, it is very similar in function and purpose to the headers employed in other schema such as the Text Encoding Initiative (TEI) or in the Encoded Archival Description (EAD). + </xsd:documentation> + </xsd:annotation> + <xsd:complexType> + <xsd:sequence> + <xsd:element name="agent" minOccurs="0" maxOccurs="unbounded"> + <xsd:annotation> + <xsd:documentation xml:lang="en">agent: + The agent element <agent> provides for various parties and their roles with respect to the METS record to be documented. + </xsd:documentation> + </xsd:annotation> + <xsd:complexType> + <xsd:sequence> + <xsd:element name="name" type="xsd:string"> + <xsd:annotation> + <xsd:documentation xml:lang="en"> + The element <name> can be used to record the full name of the document agent. + </xsd:documentation> + </xsd:annotation> + </xsd:element> + <xsd:element name="note" type="xsd:string" minOccurs="0" maxOccurs="unbounded"> + <xsd:annotation> + <xsd:documentation xml:lang="en"> + The <note> element can be used to record any additional information regarding the agent's activities with respect to the METS document. + </xsd:documentation> + </xsd:annotation> + </xsd:element> + </xsd:sequence> + <xsd:attribute name="ID" type="xsd:ID" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">ID (ID/O): This attribute uniquely identifies the element within the METS document, and would allow the element to be referenced unambiguously from another element or document via an IDREF or an XPTR. For more information on using ID attributes for internal and external linking see Chapter 4 of the METS Primer. 
+ </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="ROLE" use="required"> + <xsd:annotation> + <xsd:documentation xml:lang="en">ROLE (string/R): Specifies the function of the agent with respect to the METS record. The allowed values are: +CREATOR: The person(s) or institution(s) responsible for the METS document. +EDITOR: The person(s) or institution(s) that prepares the metadata for encoding. +ARCHIVIST: The person(s) or institution(s) responsible for the document/collection. +PRESERVATION: The person(s) or institution(s) responsible for preservation functions. +DISSEMINATOR: The person(s) or institution(s) responsible for dissemination functions. +CUSTODIAN: The person(s) or institution(s) charged with the oversight of a document/collection. +IPOWNER: Intellectual Property Owner: The person(s) or institution holding copyright, trade or service marks or other intellectual property rights for the object. +OTHER: Use OTHER if none of the preceding values pertains and clarify the type and location specifier being used in the OTHERROLE attribute (see below). + </xsd:documentation> + </xsd:annotation> + <xsd:simpleType> + <xsd:restriction base="xsd:string"> + <xsd:enumeration value="CREATOR"/> + <xsd:enumeration value="EDITOR"/> + <xsd:enumeration value="ARCHIVIST"/> + <xsd:enumeration value="PRESERVATION"/> + <xsd:enumeration value="DISSEMINATOR"/> + <xsd:enumeration value="CUSTODIAN"/> + <xsd:enumeration value="IPOWNER"/> + <xsd:enumeration value="OTHER"/> + </xsd:restriction> + </xsd:simpleType> + </xsd:attribute> + <xsd:attribute name="OTHERROLE" type="xsd:string" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">OTHERROLE (string/O): Denotes a role not contained in the allowed values set if OTHER is indicated in the ROLE attribute. 
+ </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="TYPE" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">TYPE (string/O): is used to specify the type of AGENT. It must be one of the following values: +INDIVIDUAL: Use if an individual has served as the agent. +ORGANIZATION: Use if an institution, corporate body, association, non-profit enterprise, government, religious body, etc. has served as the agent. +OTHER: Use OTHER if none of the preceding values pertain and clarify the type of agent specifier being used in the OTHERTYPE attribute + </xsd:documentation> + </xsd:annotation> + <xsd:simpleType> + <xsd:restriction base="xsd:string"> + <xsd:enumeration value="INDIVIDUAL"/> + <xsd:enumeration value="ORGANIZATION"/> + <xsd:enumeration value="OTHER"/> + </xsd:restriction> + </xsd:simpleType> + </xsd:attribute> + <xsd:attribute name="OTHERTYPE" type="xsd:string" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">OTHERTYPE (string/O): Specifies the type of agent when the value OTHER is indicated in the TYPE attribute. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + </xsd:complexType> + </xsd:element> + <xsd:element name="altRecordID" minOccurs="0" maxOccurs="unbounded"> + <xsd:annotation> + <xsd:documentation xml:lang="en"> + The alternative record identifier element <altRecordID> allows one to use alternative record identifier values for the digital object represented by the METS document; the primary record identifier is stored in the OBJID attribute in the root <mets> element. 
+ </xsd:documentation> + </xsd:annotation> + <xsd:complexType> + <xsd:simpleContent> + <xsd:extension base="xsd:string"> + <xsd:attribute name="ID" type="xsd:ID" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">ID (ID/O): This attribute uniquely identifies the element within the METS document, and would allow the element to be referenced unambiguously from another element or document via an IDREF or an XPTR. For more information on using ID attributes for internal and external linking see Chapter 4 of the METS Primer. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="TYPE" type="xsd:string" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">TYPE (string/O): A description of the identifier type (e.g., OCLC record number, LCCN, etc.). + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + </xsd:extension> + </xsd:simpleContent> + </xsd:complexType> + </xsd:element> + <xsd:element name="metsDocumentID" minOccurs="0"> + <xsd:annotation> + <xsd:documentation xml:lang="en"> + The metsDocument identifier element <metsDocumentID> allows a unique identifier to be assigned to the METS document itself. This may be different from the OBJID attribute value in the root <mets> element, which uniquely identifies the entire digital object represented by the METS document. + </xsd:documentation> + </xsd:annotation> + <xsd:complexType> + <xsd:simpleContent> + <xsd:extension base="xsd:string"> + <xsd:attribute name="ID" type="xsd:ID" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">ID (ID/O): This attribute uniquely identifies the element within the METS document, and would allow the element to be referenced unambiguously from another element or document via an IDREF or an XPTR. For more information on using ID attributes for internal and external linking see Chapter 4 of the METS Primer. 
+ </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="TYPE" type="xsd:string" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">TYPE (string/O): A description of the identifier type. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + </xsd:extension> + </xsd:simpleContent> + </xsd:complexType> + </xsd:element> + </xsd:sequence> + <xsd:attribute name="ID" type="xsd:ID" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">ID (ID/O): This attribute uniquely identifies the element within the METS document, and would allow the element to be referenced unambiguously from another element or document via an IDREF or an XPTR. For more information on using ID attributes for internal and external linking see Chapter 4 of the METS Primer. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="ADMID" type="xsd:IDREFS" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">ADMID (IDREFS/O): Contains the ID attribute values of the <techMD>, <sourceMD>, <rightsMD> and/or <digiprovMD> elements within the <amdSec> of the METS document that contain administrative metadata pertaining to the METS document itself. For more information on using METS IDREFS and IDREF type attributes for internal linking, see Chapter 4 of the METS Primer. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="CREATEDATE" type="xsd:dateTime" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">CREATEDATE (dateTime/O): Records the date/time the METS document was created. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="LASTMODDATE" type="xsd:dateTime" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">LASTMODDATE (dateTime/O): Is used to indicate the date/time the METS document was last modified. 
+ </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="RECORDSTATUS" type="xsd:string" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">RECORDSTATUS (string/O): Specifies the status of the METS document. It is used for internal processing purposes. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + </xsd:complexType> + </xsd:element> + <xsd:element name="dmdSec" type="mdSecType" minOccurs="0" maxOccurs="unbounded"> + <xsd:annotation> + <xsd:documentation xml:lang="en"> + A descriptive metadata section <dmdSec> records descriptive metadata pertaining to the METS object as a whole or one of its components. The <dmdSec> element conforms to same generic datatype as the <techMD>, <rightsMD>, <sourceMD> and <digiprovMD> elements, and supports the same sub-elements and attributes. A descriptive metadata element can either wrap the metadata (mdWrap) or reference it in an external location (mdRef) or both. METS allows multiple <dmdSec> elements; and descriptive metadata can be associated with any METS element that supports a DMDID attribute. Descriptive metadata can be expressed according to many current description standards (i.e., MARC, MODS, Dublin Core, TEI Header, EAD, VRA, FGDC, DDI) or a locally produced XML schema. + </xsd:documentation> + </xsd:annotation> + </xsd:element> + <xsd:element name="amdSec" type="amdSecType" minOccurs="0" maxOccurs="unbounded"> + <xsd:annotation> + <xsd:documentation xml:lang="en"> + The administrative metadata section <amdSec> contains the administrative metadata pertaining to the digital object, its components and any original source material from which the digital object is derived. The <amdSec> is separated into four sub-sections that accommodate technical metadata (techMD), intellectual property rights (rightsMD), analog/digital source metadata (sourceMD), and digital provenance metadata (digiprovMD). 
Each of these subsections can either wrap the metadata (mdWrap) or reference it in an external location (mdRef) or both. Multiple instances of the <amdSec> element can occur within a METS document and multiple instances of its subsections can occur in one <amdSec> element. This allows considerable flexibility in the structuring of the administrative metadata. METS does not define a vocabulary or syntax for encoding administrative metadata. Administrative metadata can be expressed within the amdSec sub-elements according to many current community defined standards, or locally produced XML schemas. </xsd:documentation> + </xsd:annotation> + </xsd:element> + <xsd:element name="fileSec" minOccurs="0"> + <xsd:annotation> + <xsd:documentation xml:lang="en"> + The overall purpose of the content file section element <fileSec> is to provide an inventory of and the location for the content files that comprise the digital object being described in the METS document. + </xsd:documentation> + </xsd:annotation> + <xsd:complexType> + <xsd:sequence> + <xsd:element name="fileGrp" maxOccurs="unbounded"> + <xsd:annotation> + <xsd:documentation xml:lang="en"> + A sequence of file group elements <fileGrp> can be used group the digital files comprising the content of a METS object either into a flat arrangement or, because each file group element can itself contain one or more file group elements, into a nested (hierarchical) arrangement. 
In the case where the content files are images of different formats and resolutions, for example, one could group the image content files by format and create a separate <fileGrp> for each image format/resolution such as: +-- one <fileGrp> for the thumbnails of the images +-- one <fileGrp> for the higher resolution JPEGs of the image +-- one <fileGrp> for the master archival TIFFs of the images +For a text resource with a variety of content file types one might group the content files at the highest level by type, and then use the <fileGrp> element’s nesting capabilities to subdivide a <fileGrp> by format within the type, such as: +-- one <fileGrp> for all of the page images with nested <fileGrp> elements for each image format/resolution (tiff, jpeg, gif) +-- one <fileGrp> for a PDF version of all the pages of the document +-- one <fileGrp> for a TEI encoded XML version of the entire document or each of its pages. +A <fileGrp> may contain zero or more <fileGrp> elements and or <file> elements. + </xsd:documentation> + </xsd:annotation> + <xsd:complexType> + <xsd:complexContent> + <xsd:extension base="fileGrpType"/> + </xsd:complexContent> + </xsd:complexType> + </xsd:element> + </xsd:sequence> + <xsd:attribute name="ID" type="xsd:ID" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">ID (ID/O): This attribute uniquely identifies the element within the METS document, and would allow the element to be referenced unambiguously from another element or document via an IDREF or an XPTR. For more information on using ID attributes for internal and external linking see Chapter 4 of the METS Primer. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + </xsd:complexType> + </xsd:element> + <xsd:element name="structMap" type="structMapType" maxOccurs="unbounded"> + <xsd:annotation> + <xsd:documentation xml:lang="en"> + The structural map section <structMap> is the heart of a METS document. 
It provides a means for organizing the digital content represented by the <file> elements in the <fileSec> of the METS document into a coherent hierarchical structure. Such a hierarchical structure can be presented to users to facilitate their comprehension and navigation of the digital content. It can further be applied to any purpose requiring an understanding of the structural relationship of the content files or parts of the content files. The organization may be specified to any level of granularity (intellectual and or physical) that is desired. Since the <structMap> element is repeatable, more than one organization can be applied to the digital content represented by the METS document. The hierarchical structure specified by a <structMap> is encoded as a tree of nested <div> elements. A <div> element may directly point to content via child file pointer <fptr> elements (if the content is represented in the <fileSec<) or child METS pointer <mptr> elements (if the content is represented by an external METS document). The <fptr> element may point to a single whole <file> element that manifests its parent <div<, or to part of a <file> that manifests its <div<. It can also point to multiple files or parts of files that must be played/displayed either in sequence or in parallel to reveal its structural division. In addition to providing a means for organizing content, the <structMap> provides a mechanism for linking content at any hierarchical level with relevant descriptive and administrative metadata. + </xsd:documentation> + </xsd:annotation> + </xsd:element> + <xsd:element name="structLink" minOccurs="0"> + <xsd:annotation> + <xsd:documentation xml:lang="en"> + The structural link section element <structLink> allows for the specification of hyperlinks between the different components of a METS structure that are delineated in a structural map. 
This element is a container for a single, repeatable element, <smLink> which indicates a hyperlink between two nodes in the structural map. The <structLink> section in the METS document is identified using its XML ID attributes. + </xsd:documentation> + </xsd:annotation> + <xsd:complexType> + <xsd:complexContent> + <xsd:extension base="structLinkType"/> + </xsd:complexContent> + </xsd:complexType> + </xsd:element> + <xsd:element name="behaviorSec" type="behaviorSecType" minOccurs="0" maxOccurs="unbounded"> + <xsd:annotation> + <xsd:documentation xml:lang="en"> + A behavior section element <behaviorSec> associates executable behaviors with content in the METS document by means of a repeatable behavior <behavior> element. This element has an interface definition <interfaceDef> element that represents an abstract definition of the set of behaviors represented by a particular behavior section. A <behavior> element also has a <mechanism> element which is used to point to a module of executable code that implements and runs the behavior defined by the interface definition. The <behaviorSec> element, which is repeatable as well as nestable, can be used to group individual behaviors within the structure of the METS document. Such grouping can be useful for organizing families of behaviors together or to indicate other relationships between particular behaviors.</xsd:documentation> + </xsd:annotation> + </xsd:element> + </xsd:sequence> + <xsd:attribute name="ID" type="xsd:ID" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">ID (ID/O): This attribute uniquely identifies the element within the METS document, and would allow the element to be referenced unambiguously from another element or document via an IDREF or an XPTR. For more information on using ID attributes for internal and external linking see Chapter 4 of the METS Primer. 
+ </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="OBJID" type="xsd:string" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">OBJID (string/O): Is the primary identifier assigned to the METS object as a whole. Although this attribute is not required, it is strongly recommended. This identifier is used to tag the entire METS object to external systems, in contrast with the ID identifier. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="LABEL" type="xsd:string" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">LABEL (string/O): Is a simple title string used to identify the object/entity being described in the METS document for the user. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="TYPE" type="xsd:string" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">TYPE (string/O): Specifies the class or type of the object, e.g.: book, journal, stereograph, dataset, video, etc. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="PROFILE" type="xsd:string" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">PROFILE (string/O): Indicates to which of the registered profile(s) the METS document conforms. For additional information about PROFILES see Chapter 5 of the METS Primer. 
+ </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + </xsd:complexType> + <xsd:complexType name="amdSecType"> + <xsd:annotation> + <xsd:documentation xml:lang="en">amdSecType: Complex Type for Administrative Metadata Sections + The administrative metadata section consists of four possible subsidiary sections: techMD (technical metadata for text/image/audio/video files), rightsMD (intellectual property rights metadata), sourceMD (analog/digital source metadata), and digiprovMD (digital provenance metadata, that is, the history of migrations/translations performed on a digital library object from its original digital capture/encoding). + </xsd:documentation> + </xsd:annotation> + <xsd:sequence> + <xsd:element name="techMD" type="mdSecType" minOccurs="0" maxOccurs="unbounded"> + <xsd:annotation> + <xsd:documentation xml:lang="en"> + A technical metadata element <techMD> records technical metadata about a component of the METS object, such as a digital content file. The <techMD> element conforms to same generic datatype as the <dmdSec>, <rightsMD>, <sourceMD> and <digiprovMD> elements, and supports the same sub-elements and attributes. A technical metadata element can either wrap the metadata (mdWrap) or reference it in an external location (mdRef) or both. METS allows multiple <techMD> elements; and technical metadata can be associated with any METS element that supports an ADMID attribute. Technical metadata can be expressed according to many current technical description standards (such as MIX and textMD) or a locally produced XML schema. + </xsd:documentation> + </xsd:annotation> + </xsd:element> + <xsd:element name="rightsMD" type="mdSecType" minOccurs="0" maxOccurs="unbounded"> + <xsd:annotation> + <xsd:documentation xml:lang="en"> + An intellectual property rights metadata element <rightsMD> records information about copyright and licensing pertaining to a component of the METS object. 
The <rightsMD> element conforms to same generic datatype as the <dmdSec>, <techMD>, <sourceMD> and <digiprovMD> elements, and supports the same sub-elements and attributes. A rights metadata element can either wrap the metadata (mdWrap) or reference it in an external location (mdRef) or both. METS allows multiple <rightsMD> elements; and rights metadata can be associated with any METS element that supports an ADMID attribute. Rights metadata can be expressed according current rights description standards (such as CopyrightMD and rightsDeclarationMD) or a locally produced XML schema. + </xsd:documentation> + </xsd:annotation> + </xsd:element> + <xsd:element name="sourceMD" type="mdSecType" minOccurs="0" maxOccurs="unbounded"> + <xsd:annotation> + <xsd:documentation xml:lang="en"> + A source metadata element <sourceMD> records descriptive and administrative metadata about the source format or media of a component of the METS object such as a digital content file. It is often used for discovery, data administration or preservation of the digital object. The <sourceMD> element conforms to same generic datatype as the <dmdSec>, <techMD>, <rightsMD>, and <digiprovMD> elements, and supports the same sub-elements and attributes. A source metadata element can either wrap the metadata (mdWrap) or reference it in an external location (mdRef) or both. METS allows multiple <sourceMD> elements; and source metadata can be associated with any METS element that supports an ADMID attribute. Source metadata can be expressed according to current source description standards (such as PREMIS) or a locally produced XML schema. 
+ </xsd:documentation> + </xsd:annotation> + </xsd:element> + <xsd:element name="digiprovMD" type="mdSecType" minOccurs="0" maxOccurs="unbounded"> + <xsd:annotation> + <xsd:documentation xml:lang="en"> + A digital provenance metadata element <digiprovMD> can be used to record any preservation-related actions taken on the various files which comprise a digital object (e.g., those subsequent to the initial digitization of the files such as transformation or migrations) or, in the case of born digital materials, the files’ creation. In short, digital provenance should be used to record information that allows both archival/library staff and scholars to understand what modifications have been made to a digital object and/or its constituent parts during its life cycle. This information can then be used to judge how those processes might have altered or corrupted the object’s ability to accurately represent the original item. One might, for example, record master derivative relationships and the process by which those derivations have been created. Or the <digiprovMD> element could contain information regarding the migration/transformation of a file from its original digitization (e.g., OCR, TEI, etc.,)to its current incarnation as a digital object (e.g., JPEG2000). The <digiprovMD> element conforms to same generic datatype as the <dmdSec>, <techMD>, <rightsMD>, and <sourceMD> elements, and supports the same sub-elements and attributes. A digital provenance metadata element can either wrap the metadata (mdWrap) or reference it in an external location (mdRef) or both. METS allows multiple <digiprovMD> elements; and digital provenance metadata can be associated with any METS element that supports an ADMID attribute. Digital provenance metadata can be expressed according to current digital provenance description standards (such as PREMIS) or a locally produced XML schema. 
+ </xsd:documentation> + </xsd:annotation> + </xsd:element> + </xsd:sequence> + <xsd:attribute name="ID" type="xsd:ID" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">ID (ID/O): This attribute uniquely identifies the element within the METS document, and would allow the element to be referenced unambiguously from another element or document via an IDREF or an XPTR. For more information on using ID attributes for internal and external linking see Chapter 4 of the METS Primer. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + </xsd:complexType> + <xsd:complexType name="fileGrpType"> + <xsd:annotation> + <xsd:documentation xml:lang="en">fileGrpType: Complex Type for File Groups + The file group is used to cluster all of the digital files composing a digital library object in a hierarchical arrangement (fileGrp is recursively defined to enable the creation of the hierarchy). Any file group may contain zero or more file elements. File elements in turn can contain one or more FLocat elements (a pointer to a file containing content for this object) and/or a FContent element (the contents of the file, in either XML or Base64 encoding). + </xsd:documentation> + </xsd:annotation> + <xsd:choice> + <xsd:element name="fileGrp" type="fileGrpType" minOccurs="0" maxOccurs="unbounded"/> + <xsd:element name="file" minOccurs="0" maxOccurs="unbounded" type="fileType" > + <xsd:annotation> + <xsd:documentation xml:lang="en"> + The file element <file> provides access to the content files for the digital object being described by the METS document. A <file> element may contain one or more <FLocat> elements which provide pointers to a content file and/or a <FContent> element which wraps an encoded version of the file. Embedding files using <FContent> can be a valuable feature for exchanging digital objects between repositories or for archiving versions of digital objects for off-site storage. 
All <FLocat> and <FContent> elements should identify and/or contain identical copies of a single file. The <file> element is recursive, thus allowing sub-files or component files of a larger file to be listed in the inventory. Alternatively, by using the <stream> element, a smaller component of a file or of a related file can be placed within a <file> element. Finally, by using the <transformFile> element, it is possible to include within a <file> element a different version of a file that has undergone a transformation for some reason, such as format migration. + </xsd:documentation> + </xsd:annotation> + </xsd:element> + </xsd:choice> + <xsd:attribute name="ID" type="xsd:ID" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">ID (ID/O): This attribute uniquely identifies the element within the METS document, and would allow the element to be referenced unambiguously from another element or document via an IDREF or an XPTR. For more information on using ID attributes for internal and external linking see Chapter 4 of the METS Primer. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="VERSDATE" type="xsd:dateTime" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">VERSDATE (dateTime/O): An optional dateTime attribute specifying the date this version/fileGrp of the digital object was created. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="ADMID" type="xsd:IDREFS" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">ADMID (IDREF/O): Contains the ID attribute values of the <techMD>, <sourceMD>, <rightsMD> and/or <digiprovMD> elements within the <amdSec> of the METS document applicable to all of the files in a particular file group. For more information on using METS IDREFS and IDREF type attributes for internal linking, see Chapter 4 of the METS Primer. 
+ </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="USE" type="xsd:string" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">USE (string/O): A tagging attribute to indicate the intended use of files within this file group (e.g., master, reference, thumbnails for image files). A USE attribute can be expressed at the<fileGrp> level, the <file> level, the <FLocat> level and/or the <FContent> level. A USE attribute value at the <fileGrp> level should pertain to all of the files in the <fileGrp>. A USE attribute at the <file> level should pertain to all copies of the file as represented by subsidiary <FLocat> and/or <FContent> elements. A USE attribute at the <FLocat> or <FContent> level pertains to the particular copy of the file that is either referenced (<FLocat>) or wrapped (<FContent>). + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + </xsd:complexType> + <xsd:complexType name="structMapType"> + <xsd:annotation> + <xsd:documentation xml:lang="en">structMapType: Complex Type for Structural Maps + The structural map (structMap) outlines a hierarchical structure for the original object being encoded, using a series of nested div elements. + </xsd:documentation> + </xsd:annotation> + <xsd:sequence> + <xsd:element name="div" type="divType"> + <xsd:annotation> + <xsd:documentation xml:lang="en"> + The structural divisions of the hierarchical organization provided by a <structMap> are represented by division <div> elements, which can be nested to any depth. Each <div> element can represent either an intellectual (logical) division or a physical division. Every <div> node in the structural map hierarchy may be connected (via subsidiary <mptr> or <fptr> elements) to content files which represent that div's portion of the whole document. 
+ </xsd:documentation> + </xsd:annotation> + </xsd:element> + </xsd:sequence> + <xsd:attribute name="ID" type="xsd:ID" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">ID (ID/O): This attribute uniquely identifies the element within the METS document, and would allow the element to be referenced unambiguously from another element or document via an IDREF or an XPTR. For more information on using ID attributes for internal and external linking see Chapter 4 of the METS Primer. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="TYPE" type="xsd:string" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">TYPE (string/O): Identifies the type of structure represented by the <structMap>. For example, a <structMap> that represented a purely logical or intellectual structure could be assigned a TYPE value of “logical” whereas a <structMap> that represented a purely physical structure could be assigned a TYPE value of “physical”. However, the METS schema neither defines nor requires a common vocabulary for this attribute. A METS profile, however, may well constrain the values for the <structMap> TYPE. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="LABEL" type="xsd:string" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">LABEL (string/O): Describes the <structMap> to viewers of the METS document. This would be useful primarily where more than one <structMap> is provided for a single object. A descriptive LABEL value, in that case, could clarify to users the purpose of each of the available structMaps. 
+ </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + </xsd:complexType> + <xsd:complexType name="divType"> + + <xsd:annotation> + <xsd:documentation xml:lang="en">divType: Complex Type for Divisions + The METS standard represents a document structurally as a series of nested div elements, that is, as a hierarchy (e.g., a book, which is composed of chapters, which are composed of subchapters, which are composed of text). Every div node in the structural map hierarchy may be connected (via subsidiary mptr or fptr elements) to content files which represent that div's portion of the whole document. + +SPECIAL NOTE REGARDING DIV ATTRIBUTE VALUES: +to clarify the differences between the ORDER, ORDERLABEL, and LABEL attributes for the <div> element, imagine a text with 10 roman numbered pages followed by 10 arabic numbered pages. Page iii would have an ORDER of "3", an ORDERLABEL of "iii" and a LABEL of "Page iii", while page 3 would have an ORDER of "13", an ORDERLABEL of "3" and a LABEL of "Page 3". + </xsd:documentation> + </xsd:annotation> + <xsd:sequence> + <xsd:element name="mptr" minOccurs="0" maxOccurs="unbounded"> + <xsd:annotation> + <xsd:documentation xml:lang="en"> + Like the <fptr> element, the METS pointer element <mptr> represents digital content that manifests its parent <div> element. Unlike the <fptr>, which either directly or indirectly points to content represented in the <fileSec> of the parent METS document, the <mptr> element points to content represented by an external METS document. Thus, this element allows multiple discrete and separate METS documents to be organized at a higher level by a separate METS document. For example, METS documents representing the individual issues in the series of a journal could be grouped together and organized by a higher level METS document that represents the entire journal series. 
Each of the <div> elements in the <structMap> of the METS document representing the journal series would point to a METS document representing an issue. It would do so via a child <mptr> element. Thus the <mptr> element gives METS users considerable flexibility in managing the depth of the <structMap> hierarchy of individual METS documents. The <mptr> element points to an external METS document by means of an xlink:href attribute and associated XLink attributes. + </xsd:documentation> + </xsd:annotation> + <xsd:complexType> + <xsd:attribute name="ID" type="xsd:ID" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">ID (ID/O): This attribute uniquely identifies the element within the METS document, and would allow the element to be referenced unambiguously from another element or document via an IDREF or an XPTR. For more information on using ID attributes for internal and external linking see Chapter 4 of the METS Primer. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attributeGroup ref="LOCATION"/> + <xsd:attributeGroup ref="xlink:simpleLink"/> + <xsd:attribute name="CONTENTIDS" type="URIs" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">CONTENTIDS (URI/O): Content IDs for the content represented by the <mptr> (equivalent to DIDL DII or Digital Item Identifier, a unique external ID). + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + </xsd:complexType> + </xsd:element> + <xsd:element name="fptr" minOccurs="0" maxOccurs="unbounded"> + <xsd:annotation> + <xsd:documentation xml:lang="en"> + The <fptr> or file pointer element represents digital content that manifests its parent <div> element. The content represented by an <fptr> element must consist of integral files or parts of files that are represented by <file> elements in the <fileSec>. Via its FILEID attribute, an <fptr> may point directly to a single integral <file> element that manifests a structural division. 
However, an <fptr> element may also govern an <area> element, a <par>, or a <seq> which in turn would point to the relevant file or files. A child <area> element can point to part of a <file> that manifests a division, while the <par> and <seq> elements can point to multiple files or parts of files that together manifest a division. More than one <fptr> element can be associated with a <div> element. Typically sibling <fptr> elements represent alternative versions, or manifestations, of the same content + </xsd:documentation> + </xsd:annotation> + <xsd:complexType> + <xsd:choice> + <xsd:element name="par" type="parType" minOccurs="0"> + <xsd:annotation> + <xsd:documentation xml:lang="en"> + The <par> or parallel files element aggregates pointers to files, parts of files, and/or sequences of files or parts of files that must be played or displayed simultaneously to manifest a block of digital content represented by an <fptr> element. This might be the case, for example, with multi-media content, where a still image might have an accompanying audio track that comments on the still image. In this case, a <par> element would aggregate two <area> elements, one of which pointed to the image file and one of which pointed to the audio file that must be played in conjunction with the image. The <area> element associated with the image could be further qualified with SHAPE and COORDS attributes if only a portion of the image file was pertinent and the <area> element associated with the audio file could be further qualified with BETYPE, BEGIN, EXTTYPE, and EXTENT attributes if only a portion of the associated audio file should be played in conjunction with the image. 
+ </xsd:documentation> + </xsd:annotation> + </xsd:element> + <xsd:element name="seq" type="seqType" minOccurs="0"> + <xsd:annotation> + <xsd:documentation xml:lang="en"> + The sequence of files element <seq> aggregates pointers to files, parts of files and/or parallel sets of files or parts of files that must be played or displayed sequentially to manifest a block of digital content. This might be the case, for example, if the parent <div> element represented a logical division, such as a diary entry, that spanned multiple pages of a diary and, hence, multiple page image files. In this case, a <seq> element would aggregate multiple, sequentially arranged <area> elements, each of which pointed to one of the image files that must be presented sequentially to manifest the entire diary entry. If the diary entry started in the middle of a page, then the first <area> element (representing the page on which the diary entry starts) might be further qualified, via its SHAPE and COORDS attributes, to specify the specific, pertinent area of the associated image file. + </xsd:documentation> + </xsd:annotation> + </xsd:element> + <xsd:element name="area" type="areaType" minOccurs="0"> + <xsd:annotation> + <xsd:documentation xml:lang="en"> + The area element <area> typically points to content consisting of just a portion or area of a file represented by a <file> element in the <fileSec>. In some contexts, however, the <area> element can also point to content represented by an integral file. A single <area> element would appear as the direct child of a <fptr> element when only a portion of a <file>, rather than an integral <file>, manifested the digital content represented by the <fptr>. Multiple <area> elements would appear as the direct children of a <par> element or a <seq> element when multiple files or parts of files manifested the digital content represented by an <fptr> element. 
When used in the context of a <par> or <seq> element an <area> element can point either to an integral file or to a segment of a file as necessary. + </xsd:documentation> + </xsd:annotation> + </xsd:element> + </xsd:choice> + <xsd:attribute name="ID" type="xsd:ID" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">ID (ID/O): This attribute uniquely identifies the element within the METS document, and would allow the element to be referenced unambiguously from another element or document via an IDREF or an XPTR. For more information on using ID attributes for internal and external linking see Chapter 4 of the METS Primer. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="FILEID" type="xsd:IDREF" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">FILEID (IDREF/O): An optional attribute that provides the XML ID identifying the <file> element that links to and/or contains the digital content represented by the <fptr>. A <fptr> element should only have a FILEID attribute value if it does not have a child <area>, <par> or <seq> element. If it has a child element, then the responsibility for pointing to the relevant content falls to this child element or its descendants. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="CONTENTIDS" type="URIs" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">CONTENTIDS (URI/O): Content IDs for the content represented by the <fptr> (equivalent to DIDL DII or Digital Item Identifier, a unique external ID). 
+ </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + </xsd:complexType> + </xsd:element> + <xsd:element name="div" type="divType" minOccurs="0" maxOccurs="unbounded"/> + </xsd:sequence> + <xsd:attribute name="ID" type="xsd:ID" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">ID (ID/O): This attribute uniquely identifies the element within the METS document, and would allow the element to be referenced unambiguously from another element or document via an IDREF or an XPTR. For more information on using ID attributes for internal and external linking see Chapter 4 of the METS Primer. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="ORDER" type="xsd:integer" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">ORDER (integer/O): A representation of the div's order among its siblings (e.g., its absolute, numeric sequence). For an example, and clarification of the distinction between ORDER and ORDERLABEL, see the description of the ORDERLABEL attribute. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="ORDERLABEL" type="xsd:string" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">ORDERLABEL (string/O): A representation of the div's order among its siblings (e.g., “xii”), or of any non-integer native numbering system. It is presumed that this value will still be machine actionable (e.g., it would support ‘go to page ___’ function), and it should not be used as a replacement/substitute for the LABEL attribute. To understand the differences between ORDER, ORDERLABEL and LABEL, imagine a text with 10 roman numbered pages followed by 10 arabic numbered pages. Page iii would have an ORDER of “3”, an ORDERLABEL of “iii” and a LABEL of “Page iii”, while page 3 would have an ORDER of “13”, an ORDERLABEL of “3” and a LABEL of “Page 3”. 
+ </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="LABEL" type="xsd:string" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">LABEL (string/O): An attribute used, for example, to identify a <div> to an end user viewing the document. Thus a hierarchical arrangement of the <div> LABEL values could provide a table of contents to the digital content represented by a METS document and facilitate the users’ navigation of the digital object. Note that a <div> LABEL should be specific to its level in the structural map. In the case of a book with chapters, the book <div> LABEL should have the book title and the chapter <div>; LABELs should have the individual chapter titles, rather than having the chapter <div> LABELs combine both book title and chapter title . For further of the distinction between LABEL and ORDERLABEL see the description of the ORDERLABEL attribute. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="DMDID" type="xsd:IDREFS" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">DMDID (IDREFS/O): Contains the ID attribute values identifying the <dmdSec>, elements in the METS document that contain or link to descriptive metadata pertaining to the structural division represented by the current <div> element. For more information on using METS IDREFS and IDREF type attributes for internal linking, see Chapter 4 of the METS Primer. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="ADMID" type="xsd:IDREFS" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">ADMID (IDREFS/O): Contains the ID attribute values identifying the <rightsMD>, <sourceMD>, <techMD> and/or <digiprovMD> elements within the <amdSec> of the METS document that contain or link to administrative metadata pertaining to the structural division represented by the <div> element. 
Typically the <div> ADMID attribute would be used to identify the <rightsMD> element or elements that pertain to the <div>, but it could be used anytime there was a need to link a <div> with pertinent administrative metadata. For more information on using METS IDREFS and IDREF type attributes for internal linking, see Chapter 4 of the METS Primer. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="TYPE" type="xsd:string" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">TYPE (string/O): An attribute that specifies the type of structural division that the <div> element represents. Possible <div> TYPE attribute values include: chapter, article, page, track, segment, section etc. METS places no constraints on the possible TYPE values. Suggestions for controlled vocabularies for TYPE may be found on the METS website. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="CONTENTIDS" type="URIs" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">CONTENTIDS (URI/O): Content IDs for the content represented by the <div> (equivalent to DIDL DII or Digital Item Identifier, a unique external ID). + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute ref="xlink:label"> + <xsd:annotation> + <xsd:documentation xml:lang="en">xlink:label - an xlink label to be referred to by an smLink element</xsd:documentation> + </xsd:annotation> + </xsd:attribute> + </xsd:complexType> + <xsd:complexType name="parType"> + <xsd:annotation> + <xsd:documentation xml:lang="en">parType: Complex Type for Parallel Files + The <par> or parallel files element aggregates pointers to files, parts of files, and/or sequences of files or parts of files that must be played or displayed simultaneously to manifest a block of digital content represented by an <fptr> element. 
+ </xsd:documentation> + </xsd:annotation> + <xsd:choice maxOccurs="unbounded"> + <xsd:element name="area" type="areaType" minOccurs="0"/> + <xsd:element name="seq" type="seqType" minOccurs="0"/> + </xsd:choice> + <xsd:attribute name="ID" type="xsd:ID" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">ID (ID/O): This attribute uniquely identifies the element within the METS document, and would allow the element to be referenced unambiguously from another element or document via an IDREF or an XPTR. For more information on using ID attributes for internal and external linking see Chapter 4 of the METS Primer. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + </xsd:complexType> + <xsd:complexType name="seqType"> + <xsd:annotation> + <xsd:documentation xml:lang="en">seqType: Complex Type for Sequences of Files + The seq element should be used to link a div to a set of content files when those files should be played/displayed sequentially to deliver content to a user. Individual <area> subelements within the seq element provide the links to the files or portions thereof. + </xsd:documentation> + </xsd:annotation> + <xsd:choice maxOccurs="unbounded"> + <xsd:element name="area" type="areaType" minOccurs="0"/> + <xsd:element name="par" type="parType" minOccurs="0"/> + </xsd:choice> + <xsd:attribute name="ID" type="xsd:ID" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">ID (ID/O): This attribute uniquely identifies the element within the METS document, and would allow the element to be referenced unambiguously from another element or document via an IDREF or an XPTR. For more information on using ID attributes for internal and external linking see Chapter 4 of the METS Primer. 
+ </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + </xsd:complexType> + <xsd:complexType name="areaType"> + <xsd:annotation> + <xsd:documentation xml:lang="en">areaType: Complex Type for Area Linking + The area element provides for more sophisticated linking between a div element and content files representing that div, be they text, image, audio, or video files. An area element can link a div to a point within a file, to a one-dimension segment of a file (e.g., text segment, image line, audio/video clip), or a two-dimensional section of a file (e.g, subsection of an image, or a subsection of the video display of a video file. The area element has no content; all information is recorded within its various attributes. + </xsd:documentation> + </xsd:annotation> + <xsd:attribute name="ID" type="xsd:ID" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">ID (ID/O): This attribute uniquely identifies the element within the METS document, and would allow the element to be referenced unambiguously from another element or document via an IDREF or an XPTR. For more information on using ID attributes for internal and external linking see Chapter 4 of the METS Primer. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="FILEID" type="xsd:IDREF" use="required"> + <xsd:annotation> + <xsd:documentation xml:lang="en">FILEID (IDREF/R): An attribute which provides the XML ID value that identifies the <file> element in the <fileSec> that then points to and/or contains the digital content represented by the <area> element. It must contain an ID value represented in an ID attribute associated with a <file> element in the <fileSec> element in the same METS document. 
+ </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="SHAPE" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">SHAPE (string/O): An attribute that can be used as in HTML to define the shape of the relevant area within the content file pointed to by the <area> element. Typically this would be used with image content (still image or video frame) when only a portion of an integal image map pertains. If SHAPE is specified then COORDS must also be present. SHAPE should be used in conjunction with COORDS in the manner defined for the shape and coords attributes on an HTML4 <area> element. SHAPE must contain one of the following values: +RECT +CIRCLE +POLY + </xsd:documentation> + </xsd:annotation> + <xsd:simpleType> + <xsd:restriction base="xsd:string"> + <xsd:enumeration value="RECT"/> + <xsd:enumeration value="CIRCLE"/> + <xsd:enumeration value="POLY"/> + </xsd:restriction> + </xsd:simpleType> + </xsd:attribute> + <xsd:attribute name="COORDS" type="xsd:string" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">COORDS (string/O): Specifies the coordinates in an image map for the shape of the pertinent area as specified in the SHAPE attribute. While technically optional, SHAPE and COORDS must both appear together to define the relevant area of image content. COORDS should be used in conjunction with SHAPE in the manner defined for the COORDs and SHAPE attributes on an HTML4 <area> element. COORDS must be a comma delimited string of integer value pairs representing coordinates (plus radius in the case of CIRCLE) within an image map. Number of coordinates pairs depends on shape: RECT: x1, y1, x2, y2; CIRC: x1, y1; POLY: x1, y1, x2, y2, x3, y3 . . . 
+ </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="BEGIN" type="xsd:string" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">BEGIN (string/O): An attribute that specifies the point in the content file where the relevant section of content begins. It can be used in conjunction with either the END attribute or the EXTENT attribute as a means of defining the relevant portion of the referenced file precisely. It can only be interpreted meaningfully in conjunction with the BETYPE or EXTTYPE, which specify the kind of beginning/ending point values or beginning/extent values that are being used. The BEGIN attribute can be used with or without a companion END or EXTENT element. In this case, the end of the content file is assumed to be the end point. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="END" type="xsd:string" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">END (string/O): An attribute that specifies the point in the content file where the relevant section of content ends. It can only be interpreted meaningfully in conjunction with the BETYPE, which specifies the kind of ending point values being used. Typically the END attribute would only appear in conjunction with a BEGIN element. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="BETYPE" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">BETYPE: Begin/End Type. + BETYPE (string/O): An attribute that specifies the kind of BEGIN and/or END values that are being used. For example, if BYTE is specified, then the BEGIN and END point values represent the byte offsets into a file. 
If IDREF is specified, then the BEGIN element specifies the ID value that identifies the element in a structured text file where the relevant section of the file begins; and the END value (if present) would specify the ID value that identifies the element with which the relevant section of the file ends. Must be one of the following values: +BYTE +IDREF +SMIL +MIDI +SMPTE-25 +SMPTE-24 +SMPTE-DF30 +SMPTE-NDF30 +SMPTE-DF29.97 +SMPTE-NDF29.97 +TIME +TCF +XPTR + </xsd:documentation> + </xsd:annotation> + <xsd:simpleType> + <xsd:restriction base="xsd:string"> + <xsd:enumeration value="BYTE"/> + <xsd:enumeration value="IDREF"/> + <xsd:enumeration value="SMIL"/> + <xsd:enumeration value="MIDI"/> + <xsd:enumeration value="SMPTE-25"/> + <xsd:enumeration value="SMPTE-24"/> + <xsd:enumeration value="SMPTE-DF30"/> + <xsd:enumeration value="SMPTE-NDF30"/> + <xsd:enumeration value="SMPTE-DF29.97"/> + <xsd:enumeration value="SMPTE-NDF29.97"/> + <xsd:enumeration value="TIME"/> + <xsd:enumeration value="TCF"/> + <xsd:enumeration value="XPTR"/> + </xsd:restriction> + </xsd:simpleType> + </xsd:attribute> + <xsd:attribute name="EXTENT" type="xsd:string" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">EXTENT (string/O): An attribute that specifies the extent of the relevant section of the content file. Can only be interpreted meaningfully in conjunction with the EXTTYPE which specifies the kind of value that is being used. Typically the EXTENT attribute would only appear in conjunction with a BEGIN element and would not be used if the BEGIN point represents an IDREF. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="EXTTYPE" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">EXTTYPE (string/O): An attribute that specifies the kind of EXTENT values that are being used. For example if BYTE is specified then EXTENT would represent a byte count. If TIME is specified the EXTENT would represent a duration of time. 
EXTTYPE must be one of the following values: +BYTE +SMIL +MIDI +SMPTE-25 +SMPTE-24 +SMPTE-DF30 +SMPTE-NDF30 +SMPTE-DF29.97 +SMPTE-NDF29.97 +TIME +TCF. + </xsd:documentation> + </xsd:annotation> + <xsd:simpleType> + <xsd:restriction base="xsd:string"> + <xsd:enumeration value="BYTE"/> + <xsd:enumeration value="SMIL"/> + <xsd:enumeration value="MIDI"/> + <xsd:enumeration value="SMPTE-25"/> + <xsd:enumeration value="SMPTE-24"/> + <xsd:enumeration value="SMPTE-DF30"/> + <xsd:enumeration value="SMPTE-NDF30"/> + <xsd:enumeration value="SMPTE-DF29.97"/> + <xsd:enumeration value="SMPTE-NDF29.97"/> + <xsd:enumeration value="TIME"/> + <xsd:enumeration value="TCF"/> + </xsd:restriction> + </xsd:simpleType> + </xsd:attribute> + <xsd:attribute name="ADMID" type="xsd:IDREFS" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">ADMID (IDREFS/O): Contains the ID attribute values identifying the <rightsMD>, <sourceMD>, <techMD> and/or <digiprovMD> elements within the <amdSec> of the METS document that contain or link to administrative metadata pertaining to the content represented by the <area> element. Typically the <area> ADMID attribute would be used to identify the <rightsMD> element or elements that pertain to the <area>, but it could be used anytime there was a need to link an <area> with pertinent administrative metadata. For more information on using METS IDREFS and IDREF type attributes for internal linking, see Chapter 4 of the METS Primer + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="CONTENTIDS" type="URIs" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">CONTENTIDS (URI/O): Content IDs for the content represented by the <area> (equivalent to DIDL DII or Digital Item Identifier, a unique external ID). 
+ </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + </xsd:complexType> + <xsd:complexType name="structLinkType"> + <xsd:annotation> + <xsd:documentation xml:lang="en">structLinkType: Complex Type for Structural Map Linking + The Structural Map Linking section allows for the specification of hyperlinks between different components of a METS structure delineated in a structural map. structLink contains a single, repeatable element, smLink. Each smLink element indicates a hyperlink between two nodes in the structMap. The structMap nodes recorded in smLink are identified using their XML ID attribute values. + </xsd:documentation> + </xsd:annotation> + <xsd:choice maxOccurs="unbounded"> + <xsd:element name="smLink"> + <xsd:annotation> + <xsd:documentation xml:lang="en"> + The Structural Map Link element <smLink> identifies a hyperlink between two nodes in the structural map. You would use <smLink>, for instance, to note the existence of hypertext links between web pages, if you wished to record those links within METS. NOTE: <smLink> is an empty element. The location of the <smLink> element to which the <smLink> element is pointing MUST be stored in the xlink:href attribute. + </xsd:documentation> + </xsd:annotation> + <xsd:complexType> + <xsd:attribute name="ID" type="xsd:ID" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">ID (ID/O): This attribute uniquely identifies the element within the METS document, and would allow the element to be referenced unambiguously from another element or document via an IDREF or an XPTR. For more information on using ID attributes for internal and external linking see Chapter 4 of the METS Primer. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute ref="xlink:arcrole" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en"> + xlink:arcrole - the role of the link, as per the xlink specification. 
See http://www.w3.org/TR/xlink/ + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute ref="xlink:title" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en"> + xlink:title - a title for the link (if needed), as per the xlink specification. See http://www.w3.org/TR/xlink/ + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute ref="xlink:show" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en"> + xlink:show - see the xlink specification at http://www.w3.org/TR/xlink/ + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute ref="xlink:actuate" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en"> + xlink:actuate - see the xlink specification at http://www.w3.org/TR/xlink/ + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute ref="xlink:to" use="required"> + <xsd:annotation> + <xsd:documentation xml:lang="en"> + xlink:to - the value of the label for the element in the structMap you are linking to. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute ref="xlink:from" use="required"> + <xsd:annotation> + <xsd:documentation xml:lang="en"> + xlink:from - the value of the label for the element in the structMap you are linking from. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + </xsd:complexType> + </xsd:element> + <xsd:element name="smLinkGrp"> + <xsd:annotation> + <xsd:documentation xml:lang="en"> + The structMap link group element <smLinkGrp> provides an implementation of xlink:extendLink, and provides xlink compliant mechanisms for establishing xlink:arcLink type links between 2 or more <div> elements in <structMap> element(s) occurring within the same METS document or different METS documents. The smLinkGrp could be used as an alternative to the <smLink> element to establish a one-to-one link between <div> elements in the same METS document in a fully xlink compliant manner. 
However, it can also be used to establish one-to-many or many-to-many links between <div> elements. For example, if a METS document contains two <structMap> elements, one of which represents a purely logical structure and one of which represents a purely physical structure, the <smLinkGrp> element would provide a means of mapping a <div> representing a logical entity (for example, a newspaper article) with multiple <div> elements in the physical <structMap> representing the physical areas that together comprise the logical entity (for example, the <div> elements representing the page areas that together comprise the newspaper article). + </xsd:documentation> + </xsd:annotation> + <xsd:complexType> + <xsd:sequence> + <xsd:element name="smLocatorLink" minOccurs="2" maxOccurs="unbounded" > + <xsd:annotation> + <xsd:documentation xml:lang="en"> + The structMap locator link element <smLocatorLink> is of xlink:type "locator". It provides a means of identifying a <div> element that will participate in one or more of the links specified by means of <smArcLink> elements within the same <smLinkGrp>. The participating <div> element that is represented by the <smLocatorLink> is identified by means of a URI in the associate xlink:href attribute. The lowest level of this xlink:href URI value should be a fragment identifier that references the ID value that identifies the relevant <div> element. For example, "xlink:href='#div20'" where "div20" is the ID value that identifies the pertinent <div> in the current METS document. Although not required by the xlink specification, an <smLocatorLink> element will typically include an xlink:label attribute in this context, as the <smArcLink> elements will reference these labels to establish the from and to sides of each arc link. 
+ </xsd:documentation> + </xsd:annotation> + <xsd:complexType> + <xsd:attribute name="ID" type="xsd:ID"> + <xsd:annotation> + <xsd:documentation xml:lang="en">ID (ID/O): This attribute uniquely identifies the element within the METS document, and would allow the element to be referenced unambiguously from another element or document via an IDREF or an XPTR. For more information on using ID attributes for internal and external linking see Chapter 4 of the METS Primer.</xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attributeGroup ref="xlink:locatorLink"/> + </xsd:complexType> + </xsd:element> + <xsd:element name="smArcLink" minOccurs="1" maxOccurs="unbounded"> + <xsd:complexType> + <xsd:annotation> + <xsd:documentation xml:lang="en"> + The structMap arc link element <smArcLink> is of xlink:type "arc" It can be used to establish a traversal link between two <div> elements as identified by <smLocatorLink> elements within the same smLinkGrp element. The associated xlink:from and xlink:to attributes identify the from and to sides of the arc link by referencing the xlink:label attribute values on the participating smLocatorLink elements. + </xsd:documentation> + </xsd:annotation> + <xsd:attribute name="ID" type="xsd:ID"> + <xsd:annotation> + <xsd:documentation xml:lang="en">ID (ID/O): This attribute uniquely identifies the element within the METS document, and would allow the element to be referenced unambiguously from another element or document via an IDREF or an XPTR. 
For more information on using ID attributes for internal and external linking see Chapter 4 of the METS Primer.</xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attributeGroup ref="xlink:arcLink"/> + <xsd:attribute name="ARCTYPE" type="xsd:string"> + <xsd:annotation> + <xsd:documentation xml:lang="en">ARCTYPE (string/O):The ARCTYPE attribute provides a means of specifying the relationship between the <div> elements participating in the arc link, and hence the purpose or role of the link. While it can be considered analogous to the xlink:arcrole attribute, its type is a simple string, rather than anyURI. ARCTYPE has no xlink specified meaning, and the xlink:arcrole attribute should be used instead of or in addition to the ARCTYPE attribute when full xlink compliance is desired with respect to specifying the role or purpose of the arc link. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="ADMID" type="xsd:IDREFS" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">ADMID (IDREFS/O): Contains the ID attribute values identifying the <sourceMD>, <techMD>, <digiprovMD> and/or <rightsMD> elements within the <amdSec> of the METS document that contain or link to administrative metadata pertaining to <smArcLink>. Typically the <smArcLink> ADMID attribute would be used to identify one or more <sourceMD> and/or <techMD> elements that refine or clarify the relationship between the xlink:from and xlink:to sides of the arc. For more information on using METS IDREFS and IDREF type attributes for internal linking, see Chapter 4 of the METS Primer. 
+ </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + </xsd:complexType> + </xsd:element> + </xsd:sequence> + <xsd:attribute name="ID" type="xsd:ID"/> + <xsd:attribute name="ARCLINKORDER" default="unordered"> + <xsd:annotation> + <xsd:documentation xml:lang="en">ARCLINKORDER (enumerated string/O): ARCLINKORDER is used to indicate whether the order of the smArcLink elements aggregated by the smLinkGrp element is significant. If the order is significant, then a value of "ordered" should be supplied. Value defaults to "unordered" Note that the ARLINKORDER attribute has no xlink specified meaning.</xsd:documentation> + </xsd:annotation> + <xsd:simpleType> + <xsd:restriction base="xsd:string"> + <xsd:enumeration value="ordered"/> + <xsd:enumeration value="unordered"/> + </xsd:restriction> + </xsd:simpleType> + </xsd:attribute> + <xsd:attributeGroup ref="xlink:extendedLink"/> + </xsd:complexType> + </xsd:element> + </xsd:choice> + <xsd:attribute name="ID" type="xsd:ID" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">ID (ID/O): This attribute uniquely identifies the element within the METS document, and would allow the element to be referenced unambiguously from another element or document via an IDREF or an XPTR. For more information on using ID attributes for internal and external linking see Chapter 4 of the METS Primer. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + </xsd:complexType> + <xsd:complexType name="behaviorSecType"> + <xsd:annotation> + <xsd:documentation xml:lang="en">behaviorSecType: Complex Type for Behavior Sections + Behaviors are executable code which can be associated with parts of a METS object. The behaviorSec element is used to group individual behaviors within a hierarchical structure. Such grouping can be useful to organize families of behaviors together or to indicate other relationships between particular behaviors. 
+ </xsd:documentation> + </xsd:annotation> + <xsd:sequence> + <xsd:element name="behaviorSec" type="behaviorSecType" minOccurs="0" maxOccurs="unbounded"/> + <xsd:element name="behavior" type="behaviorType" minOccurs="0" maxOccurs="unbounded"> + <xsd:annotation> + <xsd:documentation xml:lang="en"> + A behavior element <behavior> can be used to associate executable behaviors with content in the METS document. This element has an interface definition <interfaceDef> element that represents an abstract definition of a set of behaviors represented by a particular behavior. A <behavior> element also has a behavior mechanism <mechanism> element, a module of executable code that implements and runs the behavior defined abstractly by the interface definition. + </xsd:documentation> + </xsd:annotation> + </xsd:element> + </xsd:sequence> + <xsd:attribute name="ID" type="xsd:ID" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">ID (ID/O): This attribute uniquely identifies the element within the METS document, and would allow the element to be referenced unambiguously from another element or document via an IDREF or an XPTR. For more information on using ID attributes for internal and external linking see Chapter 4 of the METS Primer. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="CREATED" type="xsd:dateTime" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">CREATED (dateTime/O): Specifies the date and time of creation for the <behaviorSec> + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="LABEL" type="xsd:string" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">LABEL (string/O): A text description of the behavior section. 
+ </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + </xsd:complexType> + <xsd:complexType name="behaviorType"> + <xsd:annotation> + <xsd:documentation xml:lang="en">behaviorType: Complex Type for Behaviors + A behavior can be used to associate executable behaviors with content in the METS object. A behavior element has an interface definition element that represents an abstract definition of the set of behaviors represented by a particular behavior. A behavior element also has an behavior mechanism which is a module of executable code that implements and runs the behavior defined abstractly by the interface definition. + </xsd:documentation> + </xsd:annotation> + <xsd:sequence> + <xsd:element name="interfaceDef" type="objectType" minOccurs="0"> + <xsd:annotation> + <xsd:documentation xml:lang="en"> + The interface definition <interfaceDef> element contains a pointer to an abstract definition of a single behavior or a set of related behaviors that are associated with the content of a METS object. The interface definition object to which the <interfaceDef> element points using xlink:href could be another digital object, or some other entity, such as a text file which describes the interface or a Web Services Description Language (WSDL) file. Ideally, an interface definition object contains metadata that describes a set of behaviors or methods. It may also contain files that describe the intended usage of the behaviors, and possibly files that represent different expressions of the interface definition. + </xsd:documentation> + </xsd:annotation> + </xsd:element> + <xsd:element name="mechanism" type="objectType"> + <xsd:annotation> + <xsd:documentation xml:lang="en"> + A mechanism element <mechanism> contains a pointer to an executable code module that implements a set of behaviors defined by an interface definition. The <mechanism> element will be a pointer to another object (a mechanism object). 
A mechanism object could be another METS object, or some other entity (e.g., a WSDL file). A mechanism object should contain executable code, pointers to executable code, or specifications for binding to network services (e.g., web services). + </xsd:documentation> + </xsd:annotation> + </xsd:element> + </xsd:sequence> + <xsd:attribute name="ID" type="xsd:ID" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">ID (ID/O): This attribute uniquely identifies the element within the METS document, and would allow the element to be referenced unambiguously from another element or document via an IDREF or an XPTR. In the case of a <behavior> element that applies to a <transformFile> element, the ID value must be present and would be referenced from the transformFile/@TRANSFORMBEHAVIOR attribute. For more information on using ID attributes for internal and external linking see Chapter 4 of the METS Primer. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="STRUCTID" type="xsd:IDREFS" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">STRUCTID (IDREFS/O): An XML IDREFS attribute used to link a <behavior> to one or more <div> elements within a <structMap> in the METS document. The content to which the STRUCTID points is considered input to the executable behavior mechanism defined for the behavior. If the <behavior> applies to one or more <div> elements, then the STRUCTID attribute must be present. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="BTYPE" type="xsd:string" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">BTYPE (string/O): The behavior type provides a means of categorizing the related behavior.</xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="CREATED" type="xsd:dateTime" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">CREATED (dateTime/O): The dateTime of creation for the behavior. 
+ </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="LABEL" type="xsd:string" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">LABEL (string/O): A text description of the behavior. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="GROUPID" type="xsd:string" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">GROUPID (string/O): An identifier that establishes a correspondence between the given behavior and other behaviors, typically used to facilitate versions of behaviors. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="ADMID" type="xsd:IDREFS" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">ADMID (IDREFS/O): An optional attribute listing the XML ID values of administrative metadata sections within the METS document pertaining to this behavior. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + </xsd:complexType> + <xsd:complexType name="objectType"> + <xsd:annotation> + <xsd:documentation xml:lang="en">objectType: complexType for interfaceDef and mechanism elements + The mechanism and behavior elements point to external objects--an interface definition object or an executable code object respectively--which together constitute a behavior that can be applied to one or more <div> elements in a <structMap>. + </xsd:documentation> + </xsd:annotation> + <xsd:attribute name="ID" type="xsd:ID" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">ID (ID/O): This attribute uniquely identifies the element within the METS document, and would allow the element to be referenced unambiguously from another element or document via an IDREF or an XPTR. For more information on using ID attributes for internal and external linking see Chapter 4 of the METS Primer. 
+ </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="LABEL" type="xsd:string" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">LABEL (string/O): A text description of the entity represented. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attributeGroup ref="LOCATION"/> + <xsd:attributeGroup ref="xlink:simpleLink"/> + </xsd:complexType> + <xsd:complexType name="mdSecType"> + <xsd:annotation> + <xsd:documentation xml:lang="en">mdSecType: Complex Type for Metadata Sections + A generic framework for pointing to/including metadata within a METS document, a la Warwick Framework. + </xsd:documentation> + </xsd:annotation> + <xsd:all> + <xsd:element name="mdRef" minOccurs="0"> + <xsd:annotation> + <xsd:documentation xml:lang="en"> + The metadata reference element <mdRef> element is a generic element used throughout the METS schema to provide a pointer to metadata which resides outside the METS document. NB: <mdRef> is an empty element. The location of the metadata must be recorded in the xlink:href attribute, supplemented by the XPTR attribute as needed. + </xsd:documentation> + </xsd:annotation> + <xsd:complexType> + <xsd:attribute name="ID" type="xsd:ID" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">ID (ID/O): This attribute uniquely identifies the element within the METS document, and would allow the element to be referenced unambiguously from another element or document via an IDREF or an XPTR. For more information on using ID attributes for internal and external linking see Chapter 4 of the METS Primer. 
+ </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attributeGroup ref="LOCATION"/> + <xsd:attributeGroup ref="xlink:simpleLink"/> + <xsd:attributeGroup ref="METADATA"/> + <xsd:attributeGroup ref="FILECORE"/> + <xsd:attribute name="LABEL" type="xsd:string" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">LABEL (string/O): Provides a label to display to the viewer of the METS document that identifies the associated metadata. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="XPTR" type="xsd:string" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">XPTR (string/O): Locates the point within a file to which the <mdRef> element refers, if applicable. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + </xsd:complexType> + </xsd:element> + <xsd:element name="mdWrap" minOccurs="0"> + <xsd:annotation> + <xsd:documentation xml:lang="en"> + A metadata wrapper element <mdWrap> provides a wrapper around metadata embedded within a METS document. The element is repeatable. Such metadata can be in one of two forms: 1) XML-encoded metadata, with the XML-encoding identifying itself as belonging to a namespace other than the METS document namespace. 2) Any arbitrary binary or textual form, PROVIDED that the metadata is Base64 encoded and wrapped in a <binData> element within the internal descriptive metadata element. + </xsd:documentation> + </xsd:annotation> + <xsd:complexType> + <xsd:choice> + <xsd:element name="binData" type="xsd:base64Binary" minOccurs="0"> + <xsd:annotation> + <xsd:documentation xml:lang="en"> + The binary data wrapper element <binData> is used to contain Base64 encoded metadata. </xsd:documentation> + </xsd:annotation> + </xsd:element> + <xsd:element name="xmlData" minOccurs="0"> + <xsd:annotation> + <xsd:documentation xml:lang="en"> + The xml data wrapper element <xmlData> is used to contain XML encoded metadata. 
The content of an <xmlData> element can be in any namespace or in no namespace. As permitted by the XML Schema Standard, the processContents attribute value for the metadata in an <xmlData> is set to “lax”. Therefore, if the source schema and its location are identified by means of an XML schemaLocation attribute, then an XML processor will validate the elements for which it can find declarations. If a source schema is not identified, or cannot be found at the specified schemaLocation, then an XML validator will check for well-formedness, but otherwise skip over the elements appearing in the <xmlData> element. + </xsd:documentation> + </xsd:annotation> + <xsd:complexType> + <xsd:sequence> + <xsd:any namespace="##any" maxOccurs="unbounded" processContents="lax"/> + </xsd:sequence> + </xsd:complexType> + </xsd:element> + </xsd:choice> + <xsd:attribute name="ID" type="xsd:ID" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">ID (ID/O): This attribute uniquely identifies the element within the METS document, and would allow the element to be referenced unambiguously from another element or document via an IDREF or an XPTR. For more information on using ID attributes for internal and external linking see Chapter 4 of the METS Primer. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attributeGroup ref="METADATA"/> + <xsd:attributeGroup ref="FILECORE"/> + <xsd:attribute name="LABEL" type="xsd:string" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">LABEL: an optional string attribute providing a label to display to the viewer of the METS document identifying the metadata. 
+ </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + </xsd:complexType> + </xsd:element> + </xsd:all> + <xsd:attribute name="ID" type="xsd:ID" use="required"> + <xsd:annotation> + <xsd:documentation xml:lang="en">ID (ID/R): This attribute uniquely identifies the element within the METS document, and would allow the element to be referenced unambiguously from another element or document via an IDREF or an XPTR. The ID attribute on the <dmdSec>, <techMD>, <sourceMD>, <rightsMD> and <digiprovMD> elements (which are all of mdSecType) is required, and its value should be referenced from one or more DMDID attributes (when the ID identifies a <dmdSec> element) or ADMID attributes (when the ID identifies a <techMD>, <sourceMD>, <rightsMD> or <digiprovMD> element) that are associated with other elements in the METS document. The following elements support references to a <dmdSec> via a DMDID attribute: <file>, <stream>, <div>. The following elements support references to <techMD>, <sourceMD>, <rightsMD> and <digiprovMD> elements via an ADMID attribute: <metsHdr>, <dmdSec>, <techMD>, <sourceMD>, <rightsMD>, <digiprovMD>, <fileGrp>, <file>, <stream>, <div>, <area>, <behavior>. For more information on using ID attributes for internal and external linking see Chapter 4 of the METS Primer. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="GROUPID" type="xsd:string" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">GROUPID (string/O): This identifier is used to indicate that different metadata sections may be considered as part of a group. Two metadata sections with the same GROUPID value are to be considered part of the same group. For example this facility might be used to group changed versions of the same metadata if previous versions are maintained in a file for tracking purposes. 
+ </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="ADMID" type="xsd:IDREFS" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">ADMID (IDREFS/O): Contains the ID attribute values of the <digiprovMD>, <techMD>, <sourceMD> and/or <rightsMD> elements within the <amdSec> of the METS document that contain administrative metadata pertaining to the current mdSecType element. Typically used in this context to reference preservation metadata (digiprovMD) which applies to the current metadata. For more information on using METS IDREFS and IDREF type attributes for internal linking, see Chapter 4 of the METS Primer. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="CREATED" type="xsd:dateTime" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">CREATED (dateTime/O): Specifies the date and time of creation for the metadata. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="STATUS" type="xsd:string" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">STATUS (string/O): Indicates the status of this metadata (e.g., superseded, current, etc.). + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + </xsd:complexType> + <xsd:complexType name="fileType"> + <xsd:annotation> + <xsd:documentation xml:lang="en">fileType: Complex Type for Files + The file element provides access to content files for a METS object. A file element may contain one or more FLocat elements, which provide pointers to a content file, and/or an FContent element, which wraps an encoded version of the file. Note that ALL FLocat and FContent elements underneath a single file element should identify/contain identical copies of a single file. 
+ </xsd:documentation> + </xsd:annotation> + + <xsd:sequence> + <xsd:element name="FLocat" minOccurs="0" maxOccurs="unbounded"> + <xsd:annotation> + <xsd:documentation xml:lang="en"> + The file location element <FLocat> provides a pointer to the location of a content file. It uses the XLink reference syntax to provide linking information indicating the actual location of the content file, along with other attributes specifying additional linking information. NOTE: <FLocat> is an empty element. The location of the resource pointed to MUST be stored in the xlink:href attribute. + </xsd:documentation> + </xsd:annotation> + <xsd:complexType> + <xsd:attribute name="ID" type="xsd:ID" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">ID (ID/O): This attribute uniquely identifies the element within the METS document, and would allow the element to be referenced unambiguously from another element or document via an IDREF or an XPTR. For more information on using ID attributes for internal and external linking see Chapter 4 of the METS Primer. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attributeGroup ref="LOCATION"/> + <xsd:attribute name="USE" type="xsd:string" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">USE (string/O): A tagging attribute to indicate the intended use of the specific copy of the file represented by the <FLocat> element (e.g., service master, archive master). A USE attribute can be expressed at the<fileGrp> level, the <file> level, the <FLocat> level and/or the <FContent> level. A USE attribute value at the <fileGrp> level should pertain to all of the files in the <fileGrp>. A USE attribute at the <file> level should pertain to all copies of the file as represented by subsidiary <FLocat> and/or <FContent> elements. A USE attribute at the <FLocat> or <FContent> level pertains to the particular copy of the file that is either referenced (<FLocat>) or wrapped (<FContent>). 
+ </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attributeGroup ref="xlink:simpleLink"/> + </xsd:complexType> + </xsd:element> + <xsd:element name="FContent" minOccurs="0"> + <xsd:annotation> + <xsd:documentation xml:lang="en"> + The file content element <FContent> is used to identify a content file contained internally within a METS document. The content file must be either Base64 encoded and contained within the subsidiary <binData> wrapper element, or consist of XML information and be contained within the subsidiary <xmlData> wrapper element. + </xsd:documentation> + </xsd:annotation> + <xsd:complexType> + <xsd:choice> + <xsd:element name="binData" type="xsd:base64Binary" minOccurs="0"> + <xsd:annotation> + <xsd:documentation xml:lang="en"> + A binary data wrapper element <binData> is used to contain a Base64 encoded file. + </xsd:documentation> + </xsd:annotation> + </xsd:element> + <xsd:element name="xmlData" minOccurs="0"> + <xsd:annotation> + <xsd:documentation xml:lang="en"> + An xml data wrapper element <xmlData> is used to contain an XML encoded file. The content of an <xmlData> element can be in any namespace or in no namespace. As permitted by the XML Schema Standard, the processContents attribute value for the metadata in an <xmlData> element is set to “lax”. Therefore, if the source schema and its location are identified by means of an xsi:schemaLocation attribute, then an XML processor will validate the elements for which it can find declarations. If a source schema is not identified, or cannot be found at the specified schemaLocation, then an XML validator will check for well-formedness, but otherwise skip over the elements appearing in the <xmlData> element. 
+ </xsd:documentation> + </xsd:annotation> + <xsd:complexType> + <xsd:sequence> + <xsd:any namespace="##any" maxOccurs="unbounded" processContents="lax"/> + </xsd:sequence> + </xsd:complexType> + </xsd:element> + </xsd:choice> + <xsd:attribute name="ID" type="xsd:ID" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">ID (ID/O): This attribute uniquely identifies the element within the METS document, and would allow the element to be referenced unambiguously from another element or document via an IDREF or an XPTR. For more information on using ID attributes for internal and external linking see Chapter 4 of the METS Primer. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="USE" type="xsd:string" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">USE (string/O): A tagging attribute to indicate the intended use of the specific copy of the file represented by the <FContent> element (e.g., service master, archive master). A USE attribute can be expressed at the<fileGrp> level, the <file> level, the <FLocat> level and/or the <FContent> level. A USE attribute value at the <fileGrp> level should pertain to all of the files in the <fileGrp>. A USE attribute at the <file> level should pertain to all copies of the file as represented by subsidiary <FLocat> and/or <FContent> elements. A USE attribute at the <FLocat> or <FContent> level pertains to the particular copy of the file that is either referenced (<FLocat>) or wrapped (<FContent>). + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + </xsd:complexType> + </xsd:element> + <xsd:element name="stream" minOccurs="0" maxOccurs="unbounded"> + <xsd:annotation> + <xsd:documentation xml:lang="en"> + A component byte stream element <stream> may be composed of one or more subsidiary streams. An MPEG4 file, for example, might contain separate audio and video streams, each of which is associated with technical metadata. 
The repeatable <stream> element provides a mechanism to record the existence of separate data streams within a particular file, and the opportunity to associate <dmdSec> and <amdSec> with those subsidiary data streams if desired. </xsd:documentation> + </xsd:annotation> + <xsd:complexType> + <xsd:complexContent> + <xsd:restriction base="xsd:anyType"> + <xsd:attribute name="ID" type="xsd:ID" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">ID (ID/O): This attribute uniquely identifies the element within the METS document, and would allow the element to be referenced unambiguously from another element or document via an IDREF or an XPTR. For more information on using ID attributes for internal and external linking see Chapter 4 of the METS Primer. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="streamType" type="xsd:string" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">streamType (string/O): The IANA MIME media type for the bytestream.</xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="OWNERID" type="xsd:string" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">OWNERID (string/O): Used to provide a unique identifier (which could include a URI) assigned to the file. This identifier may differ from the URI used to retrieve the file. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="ADMID" type="xsd:IDREFS" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">ADMID (IDREFS/O): Contains the ID attribute values of the <techMD>, <sourceMD>, <rightsMD> and/or <digiprovMD> elements within the <amdSec> of the METS document that contain administrative metadata pertaining to the bytestream. For more information on using METS IDREFS and IDREF type attributes for internal linking, see Chapter 4 of the METS Primer. 
+ </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="DMDID" type="xsd:IDREFS" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">DMDID (IDREFS/O): Contains the ID attribute values identifying the <dmdSec>, elements in the METS document that contain or link to descriptive metadata pertaining to the content file stream represented by the current <stream> element. For more information on using METS IDREFS and IDREF type attributes for internal linking, see Chapter 4 of the METS Primer. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="BEGIN" type="xsd:string" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">BEGIN (string/O): An attribute that specifies the point in the parent <file> where the current <stream> begins. It can be used in conjunction with the END attribute as a means of defining the location of the stream within its parent file. However, the BEGIN attribute can be used with or without a companion END attribute. When no END attribute is specified, the end of the parent file is assumed also to be the end point of the stream. The BEGIN and END attributes can only be interpreted meaningfully in conjunction with a BETYPE attribute, which specifies the kind of beginning/ending point values that are being used. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="END" type="xsd:string" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">END (string/O): An attribute that specifies the point in the parent <file> where the <stream> ends. It can only be interpreted meaningfully in conjunction with the BETYPE, which specifies the kind of ending point values being used. Typically the END attribute would only appear in conjunction with a BEGIN attribute. 
+ </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="BETYPE" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">BETYPE: Begin/End Type. + BETYPE (string/O): An attribute that specifies the kind of BEGIN and/or END values that are being used. Currently BYTE is the only valid value that can be used in conjunction with nested <file> or <stream> elements. + </xsd:documentation> + </xsd:annotation> + <xsd:simpleType> + <xsd:restriction base="xsd:string"> + <xsd:enumeration value="BYTE"/> + </xsd:restriction> + </xsd:simpleType> + </xsd:attribute> + </xsd:restriction> + </xsd:complexContent> + </xsd:complexType> + </xsd:element> + <xsd:element name="transformFile" minOccurs="0" maxOccurs="unbounded"> + <xsd:annotation> + <xsd:documentation xml:lang="en"> + The transform file element <transformFile> provides a means to access any subsidiary files listed below a <file> element by indicating the steps required to "unpack" or transform the subsidiary files. This element is repeatable and might provide a link to a <behavior> in the <behaviorSec> that performs the transformation.</xsd:documentation> + </xsd:annotation> + <xsd:complexType> + <xsd:complexContent> + <xsd:restriction base="xsd:anyType"> + <xsd:attribute name="ID" type="xsd:ID" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">ID (ID/O): This attribute uniquely identifies the element within the METS document, and would allow the element to be referenced unambiguously from another element or document via an IDREF or an XPTR. For more information on using ID attributes for internal and external linking see Chapter 4 of the METS Primer. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="TRANSFORMTYPE" use="required"> + <xsd:annotation> + <xsd:documentation xml:lang="en">TRANSFORMTYPE (string/R): Is used to indicate the type of transformation needed to render content of a file accessible. 
This may include unpacking a file into subsidiary files/streams. The controlled value constraints for this XML string include “decompression” and “decryption”. Decompression is defined as the action of reversing data compression, i.e., the process of encoding information using fewer bits than an unencoded representation would use by means of specific encoding schemas. Decryption is defined as the process of restoring data that has been obscured to make it unreadable without special knowledge (encrypted data) to its original form. </xsd:documentation> + </xsd:annotation> + <xsd:simpleType> + <xsd:restriction base="xsd:string"> + <xsd:enumeration value="decompression"></xsd:enumeration> + <xsd:enumeration value="decryption"></xsd:enumeration> + </xsd:restriction> + </xsd:simpleType> + </xsd:attribute> + <xsd:attribute name="TRANSFORMALGORITHM" type="xsd:string" use="required"> + <xsd:annotation> + <xsd:documentation xml:lang="en">TRANSFORM-ALGORITHM (string/R): Specifies the decompression or decryption routine used to access the contents of the file. 
Algorithms for compression can be either loss-less or lossy.</xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="TRANSFORMKEY" type="xsd:string" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">TRANSFORMKEY (string/O): A key to be used with the transform algorithm for accessing the file’s contents.</xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="TRANSFORMBEHAVIOR" type="xsd:IDREF" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">TRANSFORMBEHAVIOR (string/O): An IDREF to a behavior element for this transformation.</xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="TRANSFORMORDER" type="xsd:positiveInteger" use="required"> + <xsd:annotation> + <xsd:documentation xml:lang="en">TRANSFORMORDER (postive-integer/R): The order in which the instructions must be followed in order to unpack or transform the container file.</xsd:documentation> + </xsd:annotation> + </xsd:attribute> + </xsd:restriction> + </xsd:complexContent> + </xsd:complexType> + </xsd:element> + <xsd:element name="file" type="fileType" minOccurs="0" maxOccurs="unbounded"></xsd:element> + </xsd:sequence> + <xsd:attribute name="ID" type="xsd:ID" use="required"> + <xsd:annotation> + <xsd:documentation xml:lang="en">ID (ID/R): This attribute uniquely identifies the element within the METS document, and would allow the element to be referenced unambiguously from another element or document via an IDREF or an XPTR. Typically, the ID attribute value on a <file> element would be referenced from one or more FILEID attributes (which are of type IDREF) on <fptr>and/or <area> elements within the <structMap>. Such references establish links between structural divisions (<div> elements) and the specific content files or parts of content files that manifest them. For more information on using ID attributes for internal and external linking see Chapter 4 of the METS Primer. 
+ </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="SEQ" type="xsd:int" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">SEQ (integer/O): Indicates the sequence of this <file> relative to the others in its <fileGrp>. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attributeGroup ref="FILECORE"></xsd:attributeGroup> + <xsd:attribute name="OWNERID" type="xsd:string" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">OWNERID (string/O): A unique identifier assigned to the file by its owner. This may be a URI which differs from the URI used to retrieve the file. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="ADMID" type="xsd:IDREFS" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">ADMID (IDREFS/O): Contains the ID attribute values of the <techMD>, <sourceMD>, <rightsMD> and/or <digiprovMD> elements within the <amdSec> of the METS document that contain administrative metadata pertaining to the file. For more information on using METS IDREFS and IDREF type attributes for internal linking, see Chapter 4 of the METS Primer. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="DMDID" type="xsd:IDREFS" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">DMDID (IDREFS/O): Contains the ID attribute values identifying the <dmdSec>, elements in the METS document that contain or link to descriptive metadata pertaining to the content file represented by the current <file> element. For more information on using METS IDREFS and IDREF type attributes for internal linking, see Chapter 4 of the METS Primer. 
+ </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="GROUPID" type="xsd:string" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">GROUPID (string/O): An identifier that establishes a correspondence between this file and files in other file groups. Typically, this will be used to associate a master file in one file group with the derivative files made from it in other file groups. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="USE" type="xsd:string" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">USE (string/O): A tagging attribute to indicate the intended use of all copies of the file aggregated by the <file> element (e.g., master, reference, thumbnails for image files). A USE attribute can be expressed at the<fileGrp> level, the <file> level, the <FLocat> level and/or the <FContent> level. A USE attribute value at the <fileGrp> level should pertain to all of the files in the <fileGrp>. A USE attribute at the <file> level should pertain to all copies of the file as represented by subsidiary <FLocat> and/or <FContent> elements. A USE attribute at the <FLocat> or <FContent> level pertains to the particular copy of the file that is either referenced (<FLocat>) or wrapped (<FContent>). + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="BEGIN" type="xsd:string" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">BEGIN (string/O): An attribute that specifies the point in the parent <file> where the current <file> begins. When used in conjunction with a <file> element, this attribute is only meaningful when this element is nested, and its parent <file> element represents a container file. It can be used in conjunction with the END attribute as a means of defining the location of the current file within its parent file. However, the BEGIN attribute can be used with or without a companion END attribute. 
When no END attribute is specified, the end of the parent file is assumed also to be the end point of the current file. The BEGIN and END attributes can only be interpreted meaningfully in conjunction with a BETYPE attribute, which specifies the kind of beginning/ending point values that are being used. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="END" type="xsd:string" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">END (string/O): An attribute that specifies the point in the parent <file> where the current, nested <file> ends. It can only be interpreted meaningfully in conjunction with the BETYPE, which specifies the kind of ending point values being used. Typically the END attribute would only appear in conjunction with a BEGIN attribute. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="BETYPE" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">BETYPE: Begin/End Type. + BETYPE (string/O): An attribute that specifies the kind of BEGIN and/or END values that are being used. Currently BYTE is the only valid value that can be used in conjunction with nested <file> or <stream> elements. + </xsd:documentation> + </xsd:annotation> + <xsd:simpleType> + <xsd:restriction base="xsd:string"> + <xsd:enumeration value="BYTE"/> + </xsd:restriction> + </xsd:simpleType> + </xsd:attribute> + </xsd:complexType> + + <xsd:simpleType name="URIs"> + <xsd:list itemType="xsd:anyURI"/> + </xsd:simpleType> + + <xsd:attributeGroup name="METADATA"> + <xsd:attribute name="MDTYPE" use="required"> + <xsd:annotation> + <xsd:documentation xml:lang="en">MDTYPE (string/R): Is used to indicate the type of the associated metadata. 
It must have one of the following values: +MARC: any form of MARC record +MODS: metadata in the Library of Congress MODS format +EAD: Encoded Archival Description finding aid +DC: Dublin Core +NISOIMG: NISO Technical Metadata for Digital Still Images +LC-AV: technical metadata specified in the Library of Congress A/V prototyping project +VRA: Visual Resources Association Core +TEIHDR: Text Encoding Initiative Header +DDI: Data Documentation Initiative +FGDC: Federal Geographic Data Committee metadata +LOM: Learning Object Model +PREMIS: PREservation Metadata: Implementation Strategies +PREMIS:OBJECT: PREMIS Object entiry +PREMIS:AGENT: PREMIS Agent entity +PREMIS:RIGHTS: PREMIS Rights entity +PREMIS:EVENT: PREMIS Event entity +TEXTMD: textMD Technical metadata for text +METSRIGHTS: Rights Declaration Schema +ISO 19115:2003 NAP: North American Profile of ISO 19115:2003 descriptive metadata +EAC-CPF: Encoded Archival Context - Corporate Bodies, Persons, and Families +OTHER: metadata in a format not specified above + </xsd:documentation> + </xsd:annotation> + <xsd:simpleType> + <xsd:restriction base="xsd:string"> + <xsd:enumeration value="MARC"/> + <xsd:enumeration value="MODS"/> + <xsd:enumeration value="EAD"/> + <xsd:enumeration value="DC"/> + <xsd:enumeration value="NISOIMG"/> + <xsd:enumeration value="LC-AV"/> + <xsd:enumeration value="VRA"/> + <xsd:enumeration value="TEIHDR"/> + <xsd:enumeration value="DDI"/> + <xsd:enumeration value="FGDC"/> + <xsd:enumeration value="LOM"/> + <xsd:enumeration value="PREMIS"/> + <xsd:enumeration value="PREMIS:OBJECT"/> + <xsd:enumeration value="PREMIS:AGENT"/> + <xsd:enumeration value="PREMIS:RIGHTS"/> + <xsd:enumeration value="PREMIS:EVENT"/> + <xsd:enumeration value="TEXTMD"/> + <xsd:enumeration value="METSRIGHTS"/> + <xsd:enumeration value="ISO 19115:2003 NAP"/> + <xsd:enumeration value="EAC-CPF"></xsd:enumeration> + <xsd:enumeration value="OTHER"/> + </xsd:restriction> + </xsd:simpleType> + </xsd:attribute> + <xsd:attribute 
name="OTHERMDTYPE" type="xsd:string" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">OTHERMDTYPE (string/O): Specifies the form of metadata in use when the value OTHER is indicated in the MDTYPE attribute. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="MDTYPEVERSION" type="xsd:string" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">MDTYPEVERSION(string/O): Provides a means for recording the version of the type of metadata (as recorded in the MDTYPE or OTHERMDTYPE attribute) that is being used. This may represent the version of the underlying data dictionary or metadata model rather than a schema version. </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + </xsd:attributeGroup> + <xsd:attributeGroup name="LOCATION"> + <xsd:attribute name="LOCTYPE" use="required"> + <xsd:annotation> + <xsd:documentation xml:lang="en">LOCTYPE (string/R): Specifies the locator type used in the xlink:href attribute. Valid values for LOCTYPE are: + ARK + URN + URL + PURL + HANDLE + DOI + OTHER + </xsd:documentation> + </xsd:annotation> + <xsd:simpleType> + <xsd:restriction base="xsd:string"> + <xsd:enumeration value="ARK"/> + <xsd:enumeration value="URN"/> + <xsd:enumeration value="URL"/> + <xsd:enumeration value="PURL"/> + <xsd:enumeration value="HANDLE"/> + <xsd:enumeration value="DOI"/> + <xsd:enumeration value="OTHER"/> + </xsd:restriction> + </xsd:simpleType> + </xsd:attribute> + <xsd:attribute name="OTHERLOCTYPE" type="xsd:string" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">OTHERLOCTYPE (string/O): Specifies the locator type when the value OTHER is used in the LOCTYPE attribute. Although optional, it is strongly recommended when OTHER is used. 
+ </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + </xsd:attributeGroup> + <xsd:attributeGroup name="FILECORE"> + <xsd:attribute name="MIMETYPE" type="xsd:string" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">MIMETYPE (string/O): The IANA MIME media type for the associated file or wrapped content. Some values for this attribute can be found on the IANA website. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="SIZE" type="xsd:long" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">SIZE (long/O): Specifies the size in bytes of the associated file or wrapped content. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="CREATED" type="xsd:dateTime" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">CREATED (dateTime/O): Specifies the date and time of creation for the associated file or wrapped content. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="CHECKSUM" type="xsd:string" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">CHECKSUM (string/O): Provides a checksum value for the associated file or wrapped content. + </xsd:documentation> + </xsd:annotation> + </xsd:attribute> + <xsd:attribute name="CHECKSUMTYPE" use="optional"> + <xsd:annotation> + <xsd:documentation xml:lang="en">CHECKSUMTYPE (enumerated string/O): Specifies the checksum algorithm used to produce the value contained in the CHECKSUM attribute. 
CHECKSUMTYPE must contain one of the following values: + Adler-32 + CRC32 + HAVAL + MD5 + MNP + SHA-1 + SHA-256 + SHA-384 + SHA-512 + TIGER + WHIRLPOOL + </xsd:documentation> + </xsd:annotation> + <xsd:simpleType> + <xsd:restriction base="xsd:string"> + <xsd:enumeration value="Adler-32"/> + <xsd:enumeration value="CRC32"/> + <xsd:enumeration value="HAVAL"/> + <xsd:enumeration value="MD5"/> + <xsd:enumeration value="MNP"/> + <xsd:enumeration value="SHA-1"/> + <xsd:enumeration value="SHA-256"/> + <xsd:enumeration value="SHA-384"/> + <xsd:enumeration value="SHA-512"/> + <xsd:enumeration value="TIGER"/> + <xsd:enumeration value="WHIRLPOOL"/> + </xsd:restriction> + </xsd:simpleType> + </xsd:attribute> + </xsd:attributeGroup> +</xsd:schema> diff --git a/src/archivematicaCommon/lib/externals/plantUML/plantuml.jar b/src/archivematicaCommon/lib/externals/plantUML/plantuml.jar new file mode 100644 index 0000000000..93640fcba6 Binary files /dev/null and b/src/archivematicaCommon/lib/externals/plantUML/plantuml.jar differ diff --git a/src/archivematicaCommon/lib/externals/pyes/__init__.py b/src/archivematicaCommon/lib/externals/pyes/__init__.py new file mode 100644 index 0000000000..abf115eac2 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/__init__.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import +import logging + +logger = logging.getLogger(__name__) + +VERSION = (0, 19, 0) + +__version__ = ".".join(map(str, VERSION[0:3])) + "".join(VERSION[3:]) +__author__ = "Alberto Paro" +__contact__ = "alberto.paro@gmail.com" +__homepage__ = "http://github.com/aparo/pyes/" +__docformat__ = "restructuredtext" + + +def is_stable_release(): + if len(VERSION) > 3 and isinstance(VERSION[3], basestring): + return False + return not VERSION[1] % 2 + + +def version_with_meta(): + return "%s (%s)" % (__version__, + is_stable_release() and "stable" or "unstable") + +from .es import ES, file_to_attachment, decode_json +from .query import * +from 
.rivers import * +from .filters import * +#from highlight import HighLighter +from .utils import * + +try: + #useful for additional query extra features + from .query_extra import * +except ImportError: + pass + +try: + #useful for additional features for django users + from .djangoutils import * +except ImportError: + pass diff --git a/src/archivematicaCommon/lib/externals/pyes/connection.py b/src/archivematicaCommon/lib/externals/pyes/connection.py new file mode 100644 index 0000000000..d6ad51de61 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/connection.py @@ -0,0 +1,203 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import +import random +import socket +import threading +import time + +from thrift import Thrift +from thrift.transport import TTransport +from thrift.transport import TSocket +from thrift.protocol import TBinaryProtocol +from .pyesthrift import Rest + +from .exceptions import NoServerAvailable +from . import logger + +__all__ = ['connect', 'connect_thread_local', 'NoServerAvailable'] + +""" +Work taken from pycassa. + +You need installed "thrift" to use this. +Just do a "pip install thrift". + +""" + +DEFAULT_SERVER = ("thrift", "127.0.0.1", 9500) + +class ClientTransport(object): + """Encapsulation of a client session.""" + + def __init__(self, server, framed_transport, timeout, recycle): + socket = TSocket.TSocket(server.hostname, server.port) + if timeout is not None: + socket.setTimeout(timeout * 1000.0) + if framed_transport: + transport = TTransport.TFramedTransport(socket) + else: + transport = TTransport.TBufferedTransport(socket) + protocol = TBinaryProtocol.TBinaryProtocolAccelerated(transport) + client = Rest.Client(protocol) + transport.open() + + # server_api_version = client.describe_version().split('.', 1) + # assert server_api_version[0] == API_VERSION[0], \ + # "Thrift API version mismatch. 
" \ + # "(Client: %s, Server: %s)" % (API_VERSION[0], server_api_version[0]) + + self.client = client + self.transport = transport + + if recycle: + self.recycle = time.time() + recycle + random.uniform(0, recycle * 0.1) + else: + self.recycle = None + + +def connect(servers=None, framed_transport=False, timeout=None, + retry_time=60, recycle=None, round_robin=None, max_retries=3): + """ + Constructs a single ElasticSearch connection. Connects to a randomly chosen + server on the list. + + If the connection fails, it will attempt to connect to each server on the + list in turn until one succeeds. If it is unable to find an active server, + it will throw a NoServerAvailable exception. + + Failing servers are kept on a separate list and eventually retried, no + sooner than `retry_time` seconds after failure. + + Parameters + ---------- + servers : [server] + List of ES servers with format: "hostname:port" + + Default: [("127.0.0.1",9500)] + framed_transport: bool + If True, use a TFramedTransport instead of a TBufferedTransport + timeout: float + Timeout in seconds (e.g. 0.5) + + Default: None (it will stall forever) + retry_time: float + Minimum time in seconds until a failed server is reinstated. (e.g. 0.5) + + Default: 60 + recycle: float + Max time in seconds before an open connection is closed and returned to the pool. + + Default: None (Never recycle) + + max_retries: int + Max retry time on connection down + + round_robin: bool + *DEPRECATED* + + Returns + ------- + ES client + """ + + if servers is None: + servers = [DEFAULT_SERVER] + return ThreadLocalConnection(servers, framed_transport, timeout, + retry_time, recycle, max_retries=max_retries) + +connect_thread_local = connect + + +class ServerSet(object): + """Automatically balanced set of servers. 
+ Manages a separate stack of failed servers, and automatic + retrial.""" + + def __init__(self, servers, retry_time=10): + self._lock = threading.RLock() + self._servers = list(servers) + self._retry_time = retry_time + self._dead = [] + + def get(self): + self._lock.acquire() + try: + if self._dead: + ts, revived = self._dead.pop() + if ts > time.time(): # Not yet, put it back + self._dead.append((ts, revived)) + else: + self._servers.append(revived) + logger.info('Server %r reinstated into working pool', revived) + if not self._servers: + logger.critical('No servers available') + raise NoServerAvailable() + return random.choice(self._servers) + finally: + self._lock.release() + + def mark_dead(self, server): + self._lock.acquire() + try: + self._servers.remove(server) + self._dead.insert(0, (time.time() + self._retry_time, server)) + finally: + self._lock.release() + + +class ThreadLocalConnection(object): + def __init__(self, servers, framed_transport=False, timeout=None, + retry_time=10, recycle=None, max_retries=3): + self._servers = ServerSet(servers, retry_time) + self._framed_transport = framed_transport + self._timeout = timeout + self._recycle = recycle + self._max_retries = max_retries + self._local = threading.local() + + def __getattr__(self, attr): + def _client_call(*args, **kwargs): + for retry in xrange(self._max_retries + 1): + try: + conn = self._ensure_connection() + return getattr(conn.client, attr)(*args, **kwargs) + except (Thrift.TException, socket.timeout, socket.error), exc: + logger.exception('Client error: %s', exc) + self.close() + + if retry < self._max_retries: + continue + + raise NoServerAvailable + + setattr(self, attr, _client_call) + return getattr(self, attr) + + def _ensure_connection(self): + """Make certain we have a valid connection and return it.""" + conn = self.connect() + if conn.recycle and conn.recycle < time.time(): + logger.debug('Client session expired after %is. 
Recycling.', self._recycle) + self.close() + conn = self.connect() + return conn + + def connect(self): + """Create new connection unless we already have one.""" + if not getattr(self._local, 'conn', None): + try: + server = self._servers.get() + logger.debug('Connecting to %s', server) + self._local.conn = ClientTransport(server, self._framed_transport, + self._timeout, self._recycle) + except (Thrift.TException, socket.timeout, socket.error): + logger.warning('Connection to %s failed.', server) + self._servers.mark_dead(server) + return self.connect() + return self._local.conn + + def close(self): + """If a connection is open, close its transport.""" + if self._local.conn: + self._local.conn.transport.close() + self._local.conn = None diff --git a/src/archivematicaCommon/lib/externals/pyes/connection_http.py b/src/archivematicaCommon/lib/externals/pyes/connection_http.py new file mode 100644 index 0000000000..6ecbda049f --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/connection_http.py @@ -0,0 +1,119 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import +from requests.exceptions import RequestException +from time import time +import random +import threading +import requests +from .exceptions import NoServerAvailable +from .fakettypes import Method, RestResponse +from . import logger + +__all__ = ["connect"] + +DEFAULT_SERVER = ("http", "127.0.0.1", 9200) +SESSION = requests.session() + + +class Connection(object): + """An ElasticSearch connection to a randomly chosen server of the list. + + If the connection fails, it attempts to connect to another random server + of the list until one succeeds. If it is unable to find an active server, + it throws a NoServerAvailable exception. + + Failing servers are kept on a separate list and eventually retried, no + sooner than `retry_time` seconds after failure. + + Parameters + ---------- + + servers: List of ES servers represented as (`scheme`, `hostname`, `port`) + tuples. 
Default: [("http", "127.0.0.1", 9200)] + + retry_time: Minimum time in seconds until a failed server is reinstated. + Default: 60 + + max_retries: Max number of attempts to connect to some server. + + timeout: Timeout in seconds. Default: None (wait forever) + + basic_auth: Use HTTP Basic Auth. A (`username`, `password`) tuple or a dict + with `username` and `password` keys. + """ + + def __init__(self, servers=None, retry_time=60, max_retries=3, timeout=None, + basic_auth=None): + if servers is None: + servers = [DEFAULT_SERVER] + self._active_servers = [server.geturl() for server in servers] + self._inactive_servers = [] + self._retry_time = retry_time + self._max_retries = max_retries + self._timeout = timeout + if isinstance(basic_auth, dict): + self._auth = (basic_auth["username"], basic_auth["password"]) + else: + self._auth = basic_auth + self._lock = threading.RLock() + self._local = threading.local() + + def execute(self, request): + """Execute a request and return a response""" + retry = 0 + server = getattr(self._local, "server", None) + while True: + if not server: + self._local.server = server = self._get_server() + try: + response = SESSION.request( + method=Method._VALUES_TO_NAMES[request.method], + url=server + request.uri, + params=request.parameters, + data=request.body, + headers=request.headers, + auth=self._auth, + timeout=self._timeout, + ) + return RestResponse(status=response.status_code, + body=response.content, + headers=response.headers) + except RequestException: + self._drop_server(server) + self._local.server = server = None + if retry >= self._max_retries: + logger.error("Client error: bailing out after %d failed retries", + self._max_retries, exc_info=1) + raise NoServerAvailable + logger.debug("Client error: %d retries left", self._max_retries - retry) + retry += 1 + + def _get_server(self): + with self._lock: + try: + ts, server = self._inactive_servers.pop() + except IndexError: + pass + else: + if ts > time(): # Not yet, put it 
back + self._inactive_servers.append((ts, server)) + else: + self._active_servers.append(server) + logger.info("Restored server %s into active pool", server) + + try: + return random.choice(self._active_servers) + except IndexError: + raise NoServerAvailable + + def _drop_server(self, server): + with self._lock: + try: + self._active_servers.remove(server) + except ValueError: + pass + else: + self._inactive_servers.insert(0, (time() + self._retry_time, server)) + logger.warning("Removed server %s from active pool", server) + +connect = Connection diff --git a/src/archivematicaCommon/lib/externals/pyes/contrib/__init__.py b/src/archivematicaCommon/lib/externals/pyes/contrib/__init__.py new file mode 100644 index 0000000000..bd8f198e1d --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/contrib/__init__.py @@ -0,0 +1,4 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +__author__ = 'alberto' diff --git a/src/archivematicaCommon/lib/externals/pyes/contrib/mappings.py b/src/archivematicaCommon/lib/externals/pyes/contrib/mappings.py new file mode 100644 index 0000000000..da35fe2de5 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/contrib/mappings.py @@ -0,0 +1,36 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +from pyes.es import ES +from pyes import mappings + +def mappings_to_code(obj, doc_count=0): + result = [] + odict = obj.as_dict() + if isinstance(obj, (mappings.DocumentObjectField, mappings.ObjectField, mappings.NestedObject)): + properties = odict.pop("properties", []) + doc_count += 1 + kwargs = ["name=%r" % obj.name, + "type=%r" % odict.pop("type")] +\ + ["%s=%r" % (k, odict[k]) for k in sorted(odict.keys())] + result.append( + "doc%d=" % doc_count + str(type(obj)).split(".")[-1].strip("'>") + "(" + ', '.join(kwargs) + ")") + for k in sorted(obj.properties.keys()): + result.extend(mappings_to_code(obj.properties[k], doc_count)) + else: + kwargs = ["name=%r" % obj.name, + "type=%r" % odict.pop("type"), + "store=%r" % obj.store, 
+ "index=%r" % odict.pop("index")] +\ + ["%s=%r" % (k, odict[k]) for k in sorted(odict.keys())] + result.append("doc%d.add_property(" % doc_count +\ + str(type(obj)).split(".")[-1].strip("'>") + "(" +\ + ', '.join(kwargs) + "))") + + return result + +if __name__ == '__main__': + es = ES("192.168.1.1:9200") + res = mappings_to_code(es.mappings.get_doctype("twitter", "twitter")) + print "\n".join(res) + + diff --git a/src/archivematicaCommon/lib/externals/pyes/convert_errors.py b/src/archivematicaCommon/lib/externals/pyes/convert_errors.py new file mode 100644 index 0000000000..84054a6ea6 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/convert_errors.py @@ -0,0 +1,92 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import + +""" +Routines for converting error responses to appropriate exceptions. +""" +from . import exceptions + +__all__ = ['raise_if_error'] + +# Patterns used to map exception strings to classes. + +# First, exceptions for which the messages start with the error name, +# and then contain the error description wrapped in []. +exceptions_by_name = dict((name, getattr(exceptions, name)) +for name in ( + "DocumentAlreadyExistsEngineException", + "DocumentAlreadyExistsException", + "TypeMissingException", + "VersionConflictEngineException", + 'ClusterBlockException', + 'ElasticSearchIllegalArgumentException', + 'IndexAlreadyExistsException', + 'IndexMissingException', + 'MapperParsingException', + 'ReduceSearchPhaseException', + 'ReplicationShardOperationFailedException', + 'SearchPhaseExecutionException', + ) +) + +# Second, patterns for exceptions where the message is just the error +# description, and doesn't contain an error name. These patterns are matched +# at the end of the exception. 
+exception_patterns_trailing = { + '] missing': exceptions.NotFoundException, + '] Already exists': exceptions.AlreadyExistsException, + } + +def raise_if_error(status, result, request=None): + """Raise an appropriate exception if the result is an error. + + Any result with a status code of 400 or higher is considered an error. + + The exception raised will either be an ElasticSearchException, or a more + specific subclass of ElasticSearchException if the type is recognised. + + The status code and result can be retrieved from the exception by accessing its + status and result properties. + + Optionally, this can take the original RestRequest instance which generated + this error, which will then get included in the exception. + + """ + assert isinstance(status, int) + + if status < 400: + return + + if status == 404 and isinstance(result, dict) and 'error' not in result: + raise exceptions.NotFoundException("Item not found", status, result, request) + + if not isinstance(result, dict) or 'error' not in result: + raise exceptions.ElasticSearchException(u"Unknown exception type: %d, %s" % (status, result), status, + result, request) + + error = result['error'] + if '; nested: ' in error: + error_list = error.split('; nested: ') + error = error_list[len(error_list) - 1] + + bits = error.split('[', 1) + if len(bits) == 2: + excClass = exceptions_by_name.get(bits[0], None) + if excClass is not None: + msg = bits[1] + if msg.endswith(']'): + msg = msg[:-1] + ''' + if request: + msg += ' (' + str(request) + ')' + ''' + raise excClass(msg, status, result, request) + + for pattern, excClass in exception_patterns_trailing.iteritems(): + if not error.endswith(pattern): + continue + # For these exceptions, the returned value is the whole descriptive + # message. 
+ raise excClass(error, status, result, request) + + raise exceptions.ElasticSearchException(error, status, result, request) diff --git a/src/archivematicaCommon/lib/externals/pyes/djangoutils.py b/src/archivematicaCommon/lib/externals/pyes/djangoutils.py new file mode 100644 index 0000000000..7fdfc8339b --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/djangoutils.py @@ -0,0 +1,150 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import + +from types import NoneType +import datetime +from django.db import models +import uuid + +__author__ = 'Alberto Paro' +__all__ = ["get_values"] + + +#--- taken from http://djangosnippets.org/snippets/2278/ + +def get_values(instance, go_into={}, exclude=(), extra=(), skip_none=False): + """ + Transforms a django model instance into an object that can be used for + serialization. + @param instance(django.db.models.Model) - the model in question + @param go_into(dict) - relations with other models that need expanding + @param exclude(tuple) - fields that will be ignored + @param extra(tuple) - additional functions/properties which are not fields + @param skip_none(bool) - skip None field + + Usage: + get_values(MyModel.objects.get(pk=187), + {'user': {'go_into': ('clan',), + 'exclude': ('crest_blob',), + 'extra': ('get_crest_path',)}}, + ('image')) + + """ + from django.db.models.manager import Manager + from django.db.models import Model + + SIMPLE_TYPES = (int, long, str, list, dict, tuple, bool, float, bool, + unicode, NoneType) + + if not isinstance(instance, Model): + raise TypeError("Argument is not a Model") + + value = { + 'pk': instance.pk, + } + + # check for simple string instead of tuples + # and dicts; this is shorthand syntax + if isinstance(go_into, str): + go_into = {go_into: {}} + + if isinstance(exclude, str): + exclude = (exclude,) + + if isinstance(extra, str): + extra = (extra,) + + # process the extra properties/function/whatever + for field in extra: + property = getattr(instance, 
field) + + if callable(property): + property = property() + + if skip_none and property is None: + continue + elif isinstance(property, SIMPLE_TYPES): + value[field] = property + else: + value[field] = repr(property) + + field_options = instance._meta.get_all_field_names() + for field in field_options: + try: + property = getattr(instance, field) + except: + continue + if skip_none and property is None: + continue + + if field in exclude or field[0] == '_' or isinstance(property, Manager): + # if it's in the exclude tuple, ignore it + # if it's a "private" field, ignore it + # if it's an instance of manager (this means a more complicated + # relationship), ignore it + continue + elif go_into.has_key(field): + # if it's in the go_into dict, make a recursive call for that field + try: + field_go_into = go_into[field].get('go_into', {}) + except AttributeError: + field_go_into = {} + + try: + field_exclude = go_into[field].get('exclude', ()) + except AttributeError: + field_exclude = () + + try: + field_extra = go_into[field].get('extra', ()) + except AttributeError: + field_extra = () + + value[field] = get_values(property, + field_go_into, + field_exclude, + field_extra, skip_none=skip_none) + else: + if isinstance(property, Model): + # if it's a model, we need it's PK # + value[field] = property.pk + elif isinstance(property, (datetime.date, + datetime.time, + datetime.datetime)): + value[field] = property + else: + # else, we just put the value # + if callable(property): + property = property() + + if isinstance(property, SIMPLE_TYPES): + value[field] = property + else: + value[field] = repr(property) + + return value + + +class EmbeddedModel(models.Model): + _embedded_in = None + + class Meta: + abstract = True + + def save(self, *args, **kwargs): + if self.pk is None: + self.pk = str(uuid.uuid4()) + if self._embedded_in is None: + raise RuntimeError("Invalid save") + self._embedded_in.save() + + def serialize(self): + if self.pk is None: + self.pk = "TODO" + 
self.id = self.pk + result = {'_app': self._meta.app_label, + '_model': self._meta.module_name, + '_id': self.pk} + for field in self._meta.fields: + result[field.attname] = getattr(self, field.attname) + return result diff --git a/src/archivematicaCommon/lib/externals/pyes/engine/base.py b/src/archivematicaCommon/lib/externals/pyes/engine/base.py new file mode 100644 index 0000000000..25a5c713f1 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/engine/base.py @@ -0,0 +1,1333 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from pyes.es import ElasticSearchModel +import warnings + +from queryset import QuerySet, QuerySetManager +from queryset import DoesNotExist, MultipleObjectsReturned +from queryset import DO_NOTHING + +from . import signals + +import sys +import operator + +from functools import partial + + +class NotRegistered(Exception): + pass + + +class InvalidDocumentError(Exception): + pass + + +class ValidationError(AssertionError): + """Validation exception. 
+ """ + errors = {} + field_name = None + _message = None + + def __init__(self, message="", **kwargs): + self.errors = kwargs.get('errors', {}) + self.field_name = kwargs.get('field_name') + self.message = message + + def __str__(self): + return self.message + + def __repr__(self): + return '%s(%s,)' % (self.__class__.__name__, self.message) + + def __getattribute__(self, name): + message = super(ValidationError, self).__getattribute__(name) + if name == 'message' and self.field_name: + return message + ' ("%s")' % self.field_name + else: + return message + + def _get_message(self): + return self._message + + def _set_message(self, message): + self._message = message + + message = property(_get_message, _set_message) + + def to_dict(self): + def build_dict(source): + errors_dict = {} + if not source: + return errors_dict + if isinstance(source, dict): + for field_name, error in source.iteritems(): + errors_dict[field_name] = build_dict(error) + elif isinstance(source, ValidationError) and source.errors: + return build_dict(source.errors) + else: + return unicode(source) + return errors_dict + if not self.errors: + return {} + return build_dict(self.errors) + + +_document_registry = {} + + +def get_document(name): + doc = _document_registry.get(name, None) + if not doc: + # Possible old style names + end = ".%s" % name + possible_match = [k for k in _document_registry.keys() if k.endswith(end)] + if len(possible_match) == 1: + doc = _document_registry.get(possible_match.pop(), None) + if not doc: + raise NotRegistered(""" + `%s` has not been registered in the document registry. + Importing the document class automatically registers it, has it + been imported? + """.strip() % name) + return doc + + +class BaseField(object): + """A base class for fields in a ElasticSearch document. Instances of this class + may be added to subclasses of `Document` to define a document's schema. 
+ + """ + + name = None + + # Fields may have _types inserted into indexes by default + _index_with_types = True + _geo_index = False + + # These track each time a Field instance is created. Used to retain order. + # The auto_creation_counter is used for fields that ESEngine implicitly + # creates, creation_counter is used for all user-specified fields. + creation_counter = 0 + auto_creation_counter = -1 + + def __init__(self, db_field=None, name=None, required=False, default=None, + unique=False, unique_with=None, primary_key=False, + validation=None, choices=None, verbose_name=None, help_text=None): + self.db_field = (db_field or name) if not primary_key else '_id' + if name: + msg = "Fields' 'name' attribute deprecated in favour of 'db_field'" + warnings.warn(msg, DeprecationWarning) + self.name = None + self.required = required or primary_key + self.default = default + self.unique = bool(unique or unique_with) + self.unique_with = unique_with + self.primary_key = primary_key + self.validation = validation + self.choices = choices + self.verbose_name = verbose_name + self.help_text = help_text + + # Adjust the appropriate creation counter, and save our local copy. + if self.db_field == '_id': + self.creation_counter = BaseField.auto_creation_counter + BaseField.auto_creation_counter -= 1 + else: + self.creation_counter = BaseField.creation_counter + BaseField.creation_counter += 1 + + def __get__(self, instance, owner): + """Descriptor for retrieving a value from a field in a document. Do + any necessary conversion between Python and ElasticSearch types. 
+ """ + if instance is None: + # Document class being used rather than a document object + return self + + # Get value from document instance if available, if not use default + value = instance._data.get(self.name) + + if value is None: + value = self.default + # Allow callable default values + if callable(value): + value = value() + + return value + + def __set__(self, instance, value): + """Descriptor for assigning a value to a field in a document. + """ + instance._data[self.name] = value + instance._mark_as_changed(self.name) + + def error(self, message="", errors=None, field_name=None): + """Raises a ValidationError. + """ + field_name = field_name if field_name else self.name + raise ValidationError(message, errors=errors, field_name=field_name) + + def to_python(self, value): + """Convert a ElasticSearch-compatible type to a Python type. + """ + return value + + def to_es(self, value): + """Convert a Python type to a ElasticSearch-compatible type. + """ + return self.to_python(value) + + def prepare_query_value(self, op, value): + """Prepare a value that is being used in a query for PyMongo. + """ + return value + + def validate(self, value): + """Perform validation on a value. + """ + pass + + def _validate(self, value): + + # check choices + if self.choices: + if isinstance(self.choices[0], (list, tuple)): + option_keys = [option_key for option_key, option_value in self.choices] + if value not in option_keys: + self.error('Value must be one of %s' % unicode(option_keys)) + else: + if value not in self.choices: + self.error('Value must be one of %s' % unicode(self.choices)) + + # check validation argument + if self.validation is not None: + if callable(self.validation): + if not self.validation(value): + self.error('Value does not match custom validation method') + else: + raise ValueError('validation argument for "%s" must be a ' + 'callable.' 
% self.name) + + self.validate(value) + + +class ComplexBaseField(BaseField): + """Handles complex fields, such as lists / dictionaries. + + Allows for nesting of embedded documents inside complex types. + Handles the lazy dereferencing of a queryset by lazily dereferencing all + items in a list / dict rather than one at a time. + + .. versionadded:: 0.5 + """ + + field = None + _dereference = False + + def __get__(self, instance, owner): + """Descriptor to automatically dereference references. + """ + if instance is None: + # Document class being used rather than a document object + return self + + if not self._dereference and instance._initialised: + from dereference import DeReference + self._dereference = DeReference() # Cached + instance._data[self.name] = self._dereference( + instance._data.get(self.name), max_depth=1, instance=instance, + name=self.name + ) + + value = super(ComplexBaseField, self).__get__(instance, owner) + + # Convert lists / values so we can watch for any changes on them + if isinstance(value, (list, tuple)) and not isinstance(value, BaseList): + value = BaseList(value, instance, self.name) + instance._data[self.name] = value + elif isinstance(value, dict) and not isinstance(value, BaseDict): + value = BaseDict(value, instance, self.name) + instance._data[self.name] = value + + if self._dereference and instance._initialised and \ + isinstance(value, (BaseList, BaseDict)) and not value._dereferenced: + value = self._dereference( + value, max_depth=1, instance=instance, name=self.name + ) + value._dereferenced = True + instance._data[self.name] = value + + return value + + def __set__(self, instance, value): + """Descriptor for assigning a value to a field in a document. + """ + instance._data[self.name] = value + instance._mark_as_changed(self.name) + + def to_python(self, value): + """Convert a ElasticSearch-compatible type to a Python type. 
+ """ + from pyes.engine.document import Document + + if isinstance(value, basestring): + return value + + if hasattr(value, 'to_python'): + return value.to_python() + + is_list = False + if not hasattr(value, 'items'): + try: + is_list = True + value = dict([(k, v) for k, v in enumerate(value)]) + except TypeError: # Not iterable return the value + return value + + if self.field: + value_dict = dict([(key, self.field.to_python(item)) for key, item in value.items()]) + else: + value_dict = {} + for k, v in value.items(): + if isinstance(v, Document): + # We need the id from the saved object to create the DBRef + if v.pk is None: + self.error('You can only reference documents once they' + ' have been saved to the database') + collection = v._get_collection_name() + #value_dict[k] = DBRef(collection, v.pk) //TODO add link + elif hasattr(v, 'to_python'): + value_dict[k] = v.to_python() + else: + value_dict[k] = self.to_python(v) + + if is_list: # Convert back to a list + return [v for k, v in sorted(value_dict.items(), key=operator.itemgetter(0))] + return value_dict + + def to_es(self, value): + """Convert a Python type to a ElasticSearch-compatible type. 
+ """ + from pyes.engine.document import Document + + if isinstance(value, basestring): + return value + + if hasattr(value, 'to_es'): + return value.to_es() + + is_list = False + if not hasattr(value, 'items'): + try: + is_list = True + value = dict([(k, v) for k, v in enumerate(value)]) + except TypeError: # Not iterable return the value + return value + + if self.field: + value_dict = dict([(key, self.field.to_es(item)) for key, item in value.items()]) + else: + value_dict = {} + for k, v in value.items(): + if isinstance(v, Document): + # We need the id from the saved object to create the DBRef + if v.pk is None: + self.error('You can only reference documents once they' + ' have been saved to the database') + + # If its a document that is not inheritable it won't have + # _types / _cls data so make it a generic reference allows + # us to dereference + meta = getattr(v, 'meta', getattr(v, '_meta', {})) + if meta and not meta.get('allow_inheritance', True) and not self.field: + from fields import GenericReferenceField + value_dict[k] = GenericReferenceField().to_es(v) + else: + collection = v._get_collection_name() + #value_dict[k] = DBRef(collection, v.pk) //TODO add link + elif hasattr(v, 'to_es'): + value_dict[k] = v.to_es() + else: + value_dict[k] = self.to_es(v) + + if is_list: # Convert back to a list + return [v for k, v in sorted(value_dict.items(), key=operator.itemgetter(0))] + return value_dict + + def validate(self, value): + """If field is provided ensure the value is valid. 
+ """ + errors = {} + if self.field: + if hasattr(value, 'iteritems'): + sequence = value.iteritems() + else: + sequence = enumerate(value) + for k, v in sequence: + try: + self.field.validate(v) + except (ValidationError, AssertionError), error: + if hasattr(error, 'errors'): + errors[k] = error.errors + else: + errors[k] = error + if errors: + field_class = self.field.__class__.__name__ + self.error('Invalid %s item (%s)' % (field_class, value), + errors=errors) + # Don't allow empty values if required + if self.required and not value: + self.error('Field is required and cannot be empty') + + def prepare_query_value(self, op, value): + return self.to_es(value) + + def lookup_member(self, member_name): + if self.field: + return self.field.lookup_member(member_name) + return None + + def _set_owner_document(self, owner_document): + if self.field: + self.field.owner_document = owner_document + self._owner_document = owner_document + + def _get_owner_document(self, owner_document): + self._owner_document = owner_document + + owner_document = property(_get_owner_document, _set_owner_document) + + +class BaseDynamicField(BaseField): + """Used by :class:`~esengine.DynamicDocument` to handle dynamic data""" + + def to_es(self, value): + """Convert a Python type to a ElasticSearch-compatible type. 
+ """ + + if isinstance(value, basestring): + return value + + if hasattr(value, 'to_es'): + return value.to_es() + + if not isinstance(value, (dict, list, tuple)): + return value + + is_list = False + if not hasattr(value, 'items'): + is_list = True + value = dict([(k, v) for k, v in enumerate(value)]) + + data = {} + for k, v in value.items(): + data[k] = self.to_es(v) + + if is_list: # Convert back to a list + value = [v for k, v in sorted(data.items(), key=operator.itemgetter(0))] + else: + value = data + return value + + def lookup_member(self, member_name): + return member_name + + def prepare_query_value(self, op, value): + if isinstance(value, basestring): + from fields import StringField + return StringField().prepare_query_value(op, value) + return self.to_es(value) + + +class ObjectIdField(BaseField): + """ + An field wrapper around ID object for ElasticSearch. + """ + + def to_python(self, value): + return value + + def to_es(self, value): + if not isinstance(value, basestring): + try: + return unicode(value) + except Exception, e: + # e.message attribute has been deprecated since Python 2.6 + self.error(unicode(e)) + return value + + def prepare_query_value(self, op, value): + return self.to_es(value) + + def validate(self, value): + try: + unicode(value) + except: + self.error('Invalid Object ID') + + +class DocumentMetaclass(type): + """Metaclass for all documents. 
class DocumentMetaclass(type):
    """Metaclass for all documents.

    Collects ``BaseField`` attributes (including those inherited from bases
    and plain mixins) into ``_fields``, builds the dotted ``_class_name`` /
    ``_superclasses`` hierarchy bookkeeping, wires up reverse-delete rules,
    attaches per-class ``DoesNotExist`` / ``MultipleObjectsReturned``
    exceptions, and registers the class in the global document registry.
    """

    def __new__(cls, name, bases, attrs):
        def _get_mixin_fields(base):
            # Collect BaseField attributes declared directly on a plain
            # (non-document) mixin class, recursing through its bases.
            attrs = {}
            attrs.update(dict([(k, v) for k, v in base.__dict__.items()
                               if issubclass(v.__class__, BaseField)]))

            # Handle simple mixin's with meta
            if hasattr(base, 'meta') and not isinstance(base, DocumentMetaclass):
                meta = attrs.get('meta', {})
                meta.update(base.meta)
                attrs['meta'] = meta

            for p_base in base.__bases__:
                #optimize :-)
                if p_base in (object, BaseDocument):
                    continue

                attrs.update(_get_mixin_fields(p_base))
            return attrs

        metaclass = attrs.get('__metaclass__')
        super_new = super(DocumentMetaclass, cls).__new__
        # Classes that explicitly carry a DocumentMetaclass __metaclass__
        # are the abstract framework base classes: build them verbatim.
        if metaclass and issubclass(metaclass, DocumentMetaclass):
            return super_new(cls, name, bases, attrs)

        doc_fields = {}
        class_name = [name]
        superclasses = {}
        simple_class = True

        for base in bases:

            # Include all fields present in superclasses
            if hasattr(base, '_fields'):
                doc_fields.update(base._fields)
                # Get superclasses from superclass
                superclasses[base._class_name] = base
                superclasses.update(base._superclasses)
            else:  # Add any mixin fields
                attrs.update(_get_mixin_fields(base))

            if hasattr(base, '_meta') and not base._meta.get('abstract'):
                # Ensure that the Document class may be subclassed -
                # inheritance may be disabled to remove dependency on
                # additional fields _cls and _types
                class_name.append(base._class_name)
                if not base._meta.get('allow_inheritance_defined', True):
                    warnings.warn(
                        "%s uses inheritance, the default for allow_inheritance "
                        "is changing to off by default. Please add it to the "
                        "document meta." % name,
                        FutureWarning
                    )
                if not base._meta.get('allow_inheritance', True):
                    raise ValueError('Document %s may not be subclassed' %
                                     base.__name__)
                else:
                    simple_class = False

        # Dotted name, outermost class first (e.g. "Base.Sub").
        doc_class_name = '.'.join(reversed(class_name))
        meta = attrs.get('_meta', {})
        meta.update(attrs.get('meta', {}))

        if 'allow_inheritance' not in meta:
            meta['allow_inheritance'] = True

        # Only simple classes - direct subclasses of Document - may set
        # allow_inheritance to False
        if not simple_class and not meta['allow_inheritance'] and not meta['abstract']:
            raise ValueError('Only direct subclasses of Document may set '
                             '"allow_inheritance" to False')
        attrs['_meta'] = meta
        attrs['_class_name'] = doc_class_name
        attrs['_superclasses'] = superclasses

        # Add the document's fields to the _fields attribute
        field_names = {}
        for attr_name, attr_value in attrs.items():
            if hasattr(attr_value, "__class__") and \
               issubclass(attr_value.__class__, BaseField):
                attr_value.name = attr_name
                if not attr_value.db_field:
                    attr_value.db_field = attr_name
                doc_fields[attr_name] = attr_value
                # Count db_field usage so duplicates can be rejected below.
                field_names[attr_value.db_field] = field_names.get(attr_value.db_field, 0) + 1

        duplicate_db_fields = [k for k, v in field_names.items() if v > 1]
        if duplicate_db_fields:
            raise InvalidDocumentError("Multiple db_fields defined for: %s " % ", ".join(duplicate_db_fields))
        attrs['_fields'] = doc_fields
        # Maps attribute name -> storage name (only where they differ),
        # plus the reverse map for deserialisation.
        attrs['_db_field_map'] = dict([(k, v.db_field) for k, v in doc_fields.items() if k != v.db_field])
        attrs['_reverse_db_field_map'] = dict([(v, k) for k, v in attrs['_db_field_map'].items()])

        # Imported here to avoid a circular import at module load time.
        from document import Document, EmbeddedDocument, DictField

        new_class = super_new(cls, name, bases, attrs)
        for field in new_class._fields.values():
            field.owner_document = new_class

            delete_rule = getattr(field, 'reverse_delete_rule', DO_NOTHING)
            f = field
            # For complex container fields the rule lives on the inner field.
            if isinstance(f, ComplexBaseField) and hasattr(f, 'field'):
                delete_rule = getattr(f.field, 'reverse_delete_rule', DO_NOTHING)
                if isinstance(f, DictField) and delete_rule != DO_NOTHING:
                    raise InvalidDocumentError("Reverse delete rules are not supported for %s (field: %s)" % (field.__class__.__name__, field.name))
                f = field.field

            if delete_rule != DO_NOTHING:
                if issubclass(new_class, EmbeddedDocument):
                    raise InvalidDocumentError("Reverse delete rules are not supported for EmbeddedDocuments (field: %s)" % field.name)
                f.document_type.register_delete_rule(new_class, field.name, delete_rule)

            # Field names must not shadow Document's own methods/attributes.
            if field.name and hasattr(Document, field.name) and EmbeddedDocument not in new_class.mro():
                raise InvalidDocumentError("%s is a document method and not a valid field name" % field.name)

        module = attrs.get('__module__')

        # Give each document class its own DoesNotExist/MultipleObjectsReturned
        # exceptions, derived from the bases' versions when present.
        base_excs = tuple(base.DoesNotExist for base in bases
                          if hasattr(base, 'DoesNotExist')) or (DoesNotExist,)
        exc = subclass_exception('DoesNotExist', base_excs, module)
        new_class.add_to_class('DoesNotExist', exc)

        base_excs = tuple(base.MultipleObjectsReturned for base in bases
                          if hasattr(base, 'MultipleObjectsReturned'))
        base_excs = base_excs or (MultipleObjectsReturned,)
        exc = subclass_exception('MultipleObjectsReturned', base_excs, module)
        new_class.add_to_class('MultipleObjectsReturned', exc)

        # Register so get_document() can resolve the class by its dotted name.
        global _document_registry
        _document_registry[doc_class_name] = new_class

        return new_class

    def add_to_class(self, name, value):
        # Attach an attribute to the class being constructed.
        setattr(self, name, value)
This differentiates + # 'real' documents from the 'Document' class + # + # Also assume a class is abstract if it has abstract set to True in + # its meta dictionary. This allows custom Document superclasses. + if (attrs.get('__metaclass__') == TopLevelDocumentMetaclass or + ('meta' in attrs and attrs['meta'].get('abstract', False))): + # Make sure no base class was non-abstract + non_abstract_bases = [b for b in bases + if hasattr(b, '_meta') and not b._meta.get('abstract', False)] + if non_abstract_bases: + raise ValueError("Abstract document cannot have non-abstract base") + return super_new(cls, name, bases, attrs) + + collection = ''.join('_%s' % c if c.isupper() else c for c in name).strip('_').lower() + + id_field = None + abstract_base_indexes = [] + base_indexes = [] + base_meta = {} + + # Subclassed documents inherit collection from superclass + for base in bases: + if hasattr(base, '_meta'): + if 'collection' in attrs.get('meta', {}) and not base._meta.get('abstract', False): + import warnings + msg = "Trying to set a collection on a subclass (%s)" % name + warnings.warn(msg, SyntaxWarning) + del(attrs['meta']['collection']) + if base._get_collection_name(): + collection = base._get_collection_name() + # Propagate index options. 
+ for key in ('index_background', 'index_drop_dups', 'index_opts'): + if key in base._meta: + base_meta[key] = base._meta[key] + + id_field = id_field or base._meta.get('id_field') + if base._meta.get('abstract', False): + abstract_base_indexes += base._meta.get('indexes', []) + else: + base_indexes += base._meta.get('indexes', []) + # Propagate 'allow_inheritance' + if 'allow_inheritance' in base._meta: + base_meta['allow_inheritance'] = base._meta['allow_inheritance'] + if 'queryset_class' in base._meta: + base_meta['queryset_class'] = base._meta['queryset_class'] + try: + base_meta['objects'] = base.__getattribute__(base, 'objects') + except TypeError: + pass + except AttributeError: + pass + + meta = { + 'abstract': False, + 'collection': collection, + 'max_documents': None, + 'max_size': None, + 'ordering': [], # default ordering applied at runtime + 'indexes': [], # indexes to be ensured at runtime + 'id_field': id_field, + 'index_background': False, + 'index_drop_dups': False, + 'index_opts': {}, + 'queryset_class': QuerySet, + 'delete_rules': {}, + 'allow_inheritance': True + } + + allow_inheritance_defined = ('allow_inheritance' in base_meta or + 'allow_inheritance'in attrs.get('meta', {})) + meta['allow_inheritance_defined'] = allow_inheritance_defined + meta.update(base_meta) + + # Apply document-defined meta options + meta.update(attrs.get('meta', {})) + attrs['_meta'] = meta + + # Set up collection manager, needs the class to have fields so use + # DocumentMetaclass before instantiating CollectionManager object + new_class = super_new(cls, name, bases, attrs) + + collection = attrs['_meta'].get('collection', None) + if callable(collection): + new_class._meta['collection'] = collection(new_class) + + # Provide a default queryset unless one has been manually provided + manager = attrs.get('objects', meta.get('objects', QuerySetManager())) + if hasattr(manager, 'queryset_class'): + meta['queryset_class'] = manager.queryset_class + new_class.objects = 
manager + + indicies = meta['indexes'] + abstract_base_indexes + user_indexes = [QuerySet._build_index_spec(new_class, spec) + for spec in indicies] + base_indexes + new_class._meta['indexes'] = user_indexes + + unique_indexes = cls._unique_with_indexes(new_class) + new_class._meta['unique_indexes'] = unique_indexes + + for field_name, field in new_class._fields.items(): + # Check for custom primary key + if field.primary_key: + current_pk = new_class._meta['id_field'] + if current_pk and current_pk != field_name: + raise ValueError('Cannot override primary key field') + + if not current_pk: + new_class._meta['id_field'] = field_name + # Make 'Document.id' an alias to the real primary key field + new_class.id = field + + if not new_class._meta['id_field']: + new_class._meta['id_field'] = 'id' + new_class._fields['id'] = ObjectIdField(db_field='_id') + new_class.id = new_class._fields['id'] + + return new_class + + @classmethod + def _unique_with_indexes(cls, new_class, namespace=""): + unique_indexes = [] + for field_name, field in new_class._fields.items(): + # Generate a list of indexes needed by uniqueness constraints + if field.unique: + field.required = True + unique_fields = [field.db_field] + + # Add any unique_with fields to the back of the index spec + if field.unique_with: + if isinstance(field.unique_with, basestring): + field.unique_with = [field.unique_with] + + # Convert unique_with field names to real field names + unique_with = [] + for other_name in field.unique_with: + parts = other_name.split('.') + # Lookup real name + parts = QuerySet._lookup_field(new_class, parts) + name_parts = [part.db_field for part in parts] + unique_with.append('.'.join(name_parts)) + # Unique field should be required + parts[-1].required = True + unique_fields += unique_with + + # Add the new index to the list + index = [("%s%s" % (namespace, f)) for f in unique_fields] + unique_indexes.append(index) + + # Grab any embedded document field unique indexes + if 
class BaseDocument(object):
    # Class-level defaults; per-instance values are set in __init__ /
    # __setattr__.
    _dynamic = False          # whether dynamic (undeclared) fields are allowed
    _created = True           # False once loaded from stored data (_from_son)
    _dynamic_lock = True      # blocks dynamic-field handling during __init__
    _initialised = False      # True after __init__ completes

    def __init__(self, **values):
        """Initialise the document: apply field defaults, then the passed
        values. Dynamic (undeclared) values are applied last, after the
        dynamic lock is released."""
        signals.pre_init.send(self.__class__, document=self, values=values)

        self._data = {}

        # Assign default values to instance
        for attr_name, field in self._fields.items():
            value = getattr(self, attr_name, None)
            setattr(self, attr_name, value)

        # Set passed values after initialisation
        if self._dynamic:
            self._dynamic_fields = {}
            dynamic_data = {}
            for key, value in values.items():
                if key in self._fields or key == '_id':
                    setattr(self, key, value)
                elif self._dynamic:
                    # Deferred until the dynamic lock is released below.
                    dynamic_data[key] = value
        else:
            for key, value in values.items():
                setattr(self, key, value)

        # Set any get_fieldname_display methods
        self.__set_field_display()

        if self._dynamic:
            self._dynamic_lock = False
            for key, value in dynamic_data.items():
                setattr(self, key, value)

        # Flag initialised
        self._initialised = True
        signals.post_init.send(self.__class__, document=self)

    def __setattr__(self, name, value):
        # Handle dynamic data only if an initialised dynamic document
        if self._dynamic and not self._dynamic_lock:

            # First assignment of an undeclared, non-private name creates a
            # dynamic field on the fly.
            if not hasattr(self, name) and not name.startswith('_'):
                field = BaseDynamicField(db_field=name)
                field.name = name
                self._dynamic_fields[name] = field

            if not name.startswith('_'):
                value = self.__expand_dynamic_values(name, value)

            # Handle marking data as changed
            if name in self._dynamic_fields:
                self._data[name] = value
                if hasattr(self, '_changed_fields'):
                    self._mark_as_changed(name)

        # Handle None values for required fields
        # NOTE(review): placement reconstructed from the patch token stream —
        # confirm this runs for all documents, not only dynamic ones.
        if value is None and name in getattr(self, '_fields', {}):
            self._data[name] = value
            if hasattr(self, '_changed_fields'):
                self._mark_as_changed(name)
            return

        if not self._created and name in self._meta.get('shard_key', tuple()):
            # Local import avoids a circular dependency with queryset.
            from queryset import OperationError
            raise OperationError("Shard Keys are immutable. Tried to update %s" % name)

        super(BaseDocument, self).__setattr__(name, value)

    def __expand_dynamic_values(self, name, value):
        """expand any dynamic values to their correct types / values"""
        if not isinstance(value, (dict, list, tuple)):
            return value

        # Normalise lists/tuples to an index-keyed dict so one recursion
        # path handles both shapes; converted back below.
        is_list = False
        if not hasattr(value, 'items'):
            is_list = True
            value = dict([(k, v) for k, v in enumerate(value)])

        # A dict carrying '_cls' is a serialised document: rebuild it.
        if not is_list and '_cls' in value:
            cls = get_document(value['_cls'])
            value = cls(**value)
            value._dynamic = True
            value._changed_fields = []
            return value

        data = {}
        for k, v in value.items():
            key = name if is_list else k
            data[k] = self.__expand_dynamic_values(key, v)

        if is_list:  # Convert back to a list
            data_items = sorted(data.items(), key=operator.itemgetter(0))
            value = [v for k, v in data_items]
        else:
            value = data

        # Convert lists / values so we can watch for any changes on them
        if isinstance(value, (list, tuple)) and not isinstance(value, BaseList):
            value = BaseList(value, self, name)
        elif isinstance(value, dict) and not isinstance(value, BaseDict):
            value = BaseDict(value, self, name)

        return value
+ """ + # Get a list of tuples of field names and their current values + fields = [(field, getattr(self, name)) + for name, field in self._fields.items()] + + # Ensure that each field is matched to a valid value + errors = {} + for field, value in fields: + if value is not None: + try: + field._validate(value) + except ValidationError, error: + errors[field.name] = error.errors or error + except (ValueError, AttributeError, AssertionError), error: + errors[field.name] = error + elif field.required: + errors[field.name] = ValidationError('Field is required', + field_name=field.name) + if errors: + raise ValidationError('Errors encountered validating document', + errors=errors) + + def to_es(self): + """Return data dictionary ready for use with ElasticSearch. + """ + data = ElasticSearchModel() + for field_name, field in self._fields.items(): + value = getattr(self, field_name, None) + if value is not None: + if field.db_field=="id": + data._meta.id = field.to_es(value) + else: + data[field.db_field] = field.to_es(value) + # Only add _cls and _types if allow_inheritance is not False + if not (hasattr(self, '_meta') and + self._meta.get('allow_inheritance', True) == False): + data._meta.type=self._meta.get("collection") + #data._meta.types= self._superclasses.keys() + [self._class_name] + + if not self._dynamic: + return data + + for name, field in self._dynamic_fields.items(): + data[name] = field.to_es(self._data.get(name, None)) + return data + + @classmethod + def _get_collection_name(cls): + """Returns the collection name for this class. + """ + return cls._meta.get('collection', None) + + @classmethod + def _from_son(cls, son): + """Create an instance of a Document (subclass). 
+ """ + # get the class name from the document, falling back to the given + # class if unavailable + class_name = son.get(u'_cls', cls._class_name) + data = dict((str(key), value) for key, value in son.items()) + + if '_types' in data: + del data['_types'] + + if '_cls' in data: + del data['_cls'] + + # Return correct subclass for document type + if class_name != cls._class_name: + cls = get_document(class_name) + + changed_fields = [] + for field_name, field in cls._fields.items(): + if field.db_field in data: + value = data[field.db_field] + data[field_name] = (value if value is None + else field.to_python(value)) + elif field.default: + default = field.default + if callable(default): + default = default() + if isinstance(default, BaseDocument): + changed_fields.append(field_name) + + obj = cls(**data) + obj._changed_fields = changed_fields + obj._created = False + return obj + + def _mark_as_changed(self, key): + """Marks a key as explicitly changed by the user + """ + if not key: + return + key = self._db_field_map.get(key, key) + if hasattr(self, '_changed_fields') and key not in self._changed_fields: + self._changed_fields.append(key) + + def _get_changed_fields(self, key='', inspected=None): + """Returns a list of all fields that have explicitly been changed. + """ + from document import EmbeddedDocument, DynamicEmbeddedDocument + _changed_fields = [] + _changed_fields += getattr(self, '_changed_fields', []) + + inspected = inspected or set() + if hasattr(self, 'id'): + if self.id in inspected: + return _changed_fields + inspected.add(self.id) + + field_list = self._fields.copy() + if self._dynamic: + field_list.update(self._dynamic_fields) + + for field_name in field_list: + db_field_name = self._db_field_map.get(field_name, field_name) + key = '%s.' 
% db_field_name + field = getattr(self, field_name, None) + if hasattr(field, 'id'): + if field.id in inspected: + continue + inspected.add(field.id) + + if isinstance(field, (EmbeddedDocument, DynamicEmbeddedDocument)) and db_field_name not in _changed_fields: # Grab all embedded fields that have been changed + _changed_fields += ["%s%s" % (key, k) for k in field._get_changed_fields(key, inspected) if k] + elif isinstance(field, (list, tuple, dict)) and db_field_name not in _changed_fields: # Loop list / dict fields as they contain documents + # Determine the iterator to use + if not hasattr(field, 'items'): + iterator = enumerate(field) + else: + iterator = field.iteritems() + for index, value in iterator: + if not hasattr(value, '_get_changed_fields'): + continue + list_key = "%s%s." % (key, index) + _changed_fields += ["%s%s" % (list_key, k) for k in value._get_changed_fields(list_key, inspected) if k] + return _changed_fields + + def _delta(self): + """Returns the delta (set, unset) of the changes for a document. + Gets any values that have been explicitly changed. + """ + # Handles cases where not loaded from_son but has _id + doc = self.to_es() + set_fields = self._get_changed_fields() + set_data = {} + unset_data = {} + parts = [] + if hasattr(self, '_changed_fields'): + set_data = {} + # Fetch each set item from its path + for path in set_fields: + parts = path.split('.') + d = doc + for p in parts: + if hasattr(d, '__getattr__'): + d = getattr(p, d) + elif p.isdigit(): + d = d[int(p)] + else: + d = d.get(p) + set_data[path] = d + else: + set_data = doc + if '_id' in set_data: + del(set_data['_id']) + + # Determine if any changed items were actually unset. + for path, value in set_data.items(): + if value or isinstance(value, bool): + continue + + # If we've set a value that ain't the default value dont unset it. 
+ default = None + if self._dynamic and len(parts) and parts[0] in self._dynamic_fields: + del(set_data[path]) + unset_data[path] = 1 + continue + elif path in self._fields: + default = self._fields[path].default + else: # Perform a full lookup for lists / embedded lookups + d = self + parts = path.split('.') + db_field_name = parts.pop() + for p in parts: + if p.isdigit(): + d = d[int(p)] + elif hasattr(d, '__getattribute__') and not isinstance(d, dict): + real_path = d._reverse_db_field_map.get(p, p) + d = getattr(d, real_path) + else: + d = d.get(p) + + if hasattr(d, '_fields'): + field_name = d._reverse_db_field_map.get(db_field_name, + db_field_name) + + if field_name in d._fields: + default = d._fields.get(field_name).default + else: + default = None + + if default is not None: + if callable(default): + default = default() + if default != value: + continue + + del(set_data[path]) + unset_data[path] = 1 + return set_data, unset_data + + @classmethod + def _geo_indices(cls, inspected=None): + inspected = inspected or [] + geo_indices = [] + inspected.append(cls) + for field in cls._fields.values(): + if hasattr(field, 'document_type'): + field_cls = field.document_type + if field_cls in inspected: + continue + if hasattr(field_cls, '_geo_indices'): + geo_indices += field_cls._geo_indices(inspected) + elif field._geo_index: + geo_indices.append(field) + return geo_indices + + def __getstate__(self): + removals = ["get_%s_display" % k for k, v in self._fields.items() if v.choices] + for k in removals: + if hasattr(self, k): + delattr(self, k) + return self.__dict__ + + def __setstate__(self, __dict__): + self.__dict__ = __dict__ + self.__set_field_display() + + def __set_field_display(self): + for attr_name, field in self._fields.items(): + if field.choices: # dynamically adds a way to get the display value for a field with choices + setattr(self, 'get_%s_display' % attr_name, partial(self.__get_field_display, field=field)) + + def __get_field_display(self, 
class BaseList(list):
    """A special list so we can watch any changes

    Every mutating operation notifies the owning document (``_instance``)
    that the field ``_name`` has changed, so delta tracking picks it up.
    """

    _dereferenced = False
    _instance = None   # owning document, set in __init__
    _name = None       # field name on the owning document

    def __init__(self, list_items, instance, name):
        self._instance = instance
        self._name = name
        super(BaseList, self).__init__(list_items)

    def __setitem__(self, *args, **kwargs):
        self._mark_as_changed()
        super(BaseList, self).__setitem__(*args, **kwargs)

    def __delitem__(self, *args, **kwargs):
        self._mark_as_changed()
        super(BaseList, self).__delitem__(*args, **kwargs)

    def __getstate__(self):
        # NOTE(review): sets `observer`, an attribute nothing else in this
        # file reads — looks like a leftover; confirm before relying on
        # pickling BaseList instances.
        self.observer = None
        return self

    def __setstate__(self, state):
        # NOTE(review): rebinding the local name `self` does not restore any
        # state; this method is effectively a no-op — confirm pickle support
        # is actually exercised.
        self = state
        return self

    def append(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseList, self).append(*args, **kwargs)

    def extend(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseList, self).extend(*args, **kwargs)

    def insert(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseList, self).insert(*args, **kwargs)

    def pop(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseList, self).pop(*args, **kwargs)

    def remove(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseList, self).remove(*args, **kwargs)

    def reverse(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseList, self).reverse(*args, **kwargs)

    def sort(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseList, self).sort(*args, **kwargs)

    def _mark_as_changed(self):
        # Tolerates a missing/foreign owner (e.g. after unpickling).
        if hasattr(self._instance, '_mark_as_changed'):
            self._instance._mark_as_changed(self._name)
super(BaseDict, self).__delattr__(*args, **kwargs) + + def __getstate__(self): + self.instance = None + self._dereferenced = False + return self + + def __setstate__(self, state): + self = state + return self + + def clear(self, *args, **kwargs): + self._mark_as_changed() + super(BaseDict, self).clear(*args, **kwargs) + + def pop(self, *args, **kwargs): + self._mark_as_changed() + super(BaseDict, self).pop(*args, **kwargs) + + def popitem(self, *args, **kwargs): + self._mark_as_changed() + super(BaseDict, self).popitem(*args, **kwargs) + + def update(self, *args, **kwargs): + self._mark_as_changed() + super(BaseDict, self).update(*args, **kwargs) + + def _mark_as_changed(self): + if hasattr(self._instance, '_mark_as_changed'): + self._instance._mark_as_changed(self._name) + +if sys.version_info < (2, 5): + # Prior to Python 2.5, Exception was an old-style class + import types + def subclass_exception(name, parents, unused): + import types + return types.ClassType(name, parents, {}) +else: + def subclass_exception(name, parents, module): + return type(name, parents, {'__module__': module}) diff --git a/src/archivematicaCommon/lib/externals/pyes/engine/dereference.py b/src/archivematicaCommon/lib/externals/pyes/engine/dereference.py new file mode 100644 index 0000000000..d83da7c32f --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/engine/dereference.py @@ -0,0 +1,188 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from base import (BaseDict, BaseList, TopLevelDocumentMetaclass, get_document) +from fields import (ReferenceField, ListField, DictField, MapField) +from connection import get_db +from queryset import QuerySet +from document import Document + + +class DeReference(object): + + def __call__(self, items, max_depth=1, instance=None, name=None): + """ + Cheaply dereferences the items to a set depth. + Also handles the convertion of complex data types. + + :param items: The iterable (dict, list, queryset) to be dereferenced. 
+ :param max_depth: The maximum depth to recurse to + :param instance: The owning instance used for tracking changes by + :class:`~esengine.base.ComplexBaseField` + :param name: The name of the field, used for tracking changes by + :class:`~esengine.base.ComplexBaseField` + """ + if items is None or isinstance(items, basestring): + return items + + # cheapest way to convert a queryset to a list + # list(queryset) uses a count() query to determine length + if isinstance(items, QuerySet): + items = [i for i in items] + + self.max_depth = max_depth + + doc_type = None + if instance and instance._fields: + doc_type = instance._fields[name].field + + if isinstance(doc_type, ReferenceField): + doc_type = doc_type.document_type + if all([i.__class__ == doc_type for i in items]): + return items + + self.reference_map = self._find_references(items) + self.object_map = self._fetch_objects(doc_type=doc_type) + return self._attach_objects(items, 0, instance, name) + + def _find_references(self, items, depth=0): + """ + Recursively finds all db references to be dereferenced + + :param items: The iterable (dict, list, queryset) + :param depth: The current depth of recursion + """ + reference_map = {} + if not items or depth >= self.max_depth: + return reference_map + + # Determine the iterator to use + if not hasattr(items, 'items'): + iterator = enumerate(items) + else: + iterator = items.iteritems() + + # Recursively find dbreferences + depth += 1 + for k, item in iterator: + if hasattr(item, '_fields'): + for field_name, field in item._fields.iteritems(): + v = item._data.get(field_name, None) + if isinstance(v, (DBRef)): + reference_map.setdefault(field.document_type, []).append(v.id) + elif isinstance(v, (dict, SON)) and '_ref' in v: + reference_map.setdefault(get_document(v['_cls']), []).append(v['_ref'].id) + elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: + field_cls = getattr(getattr(field, 'field', None), 'document_type', None) + references = 
self._find_references(v, depth) + for key, refs in references.iteritems(): + if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)): + key = field_cls + reference_map.setdefault(key, []).extend(refs) + elif isinstance(item, (DBRef)): + reference_map.setdefault(item.collection, []).append(item.id) + elif isinstance(item, (dict, SON)) and '_ref' in item: + reference_map.setdefault(get_document(item['_cls']), []).append(item['_ref'].id) + elif isinstance(item, (dict, list, tuple)) and depth - 1 <= self.max_depth: + references = self._find_references(item, depth - 1) + for key, refs in references.iteritems(): + reference_map.setdefault(key, []).extend(refs) + + return reference_map + + def _fetch_objects(self, doc_type=None): + """Fetch all references and convert to their document objects + """ + object_map = {} + for col, dbrefs in self.reference_map.iteritems(): + keys = object_map.keys() + refs = list(set([dbref for dbref in dbrefs if str(dbref) not in keys])) + if hasattr(col, 'objects'): # We have a document class for the refs + references = col.objects.in_bulk(refs) + for key, doc in references.iteritems(): + object_map[key] = doc + else: # Generic reference: use the refs data to convert to document + if doc_type and not isinstance(doc_type, (ListField, DictField, MapField,) ): + references = doc_type._get_db()[col].find({'_id': {'$in': refs}}) + for ref in references: + doc = doc_type._from_son(ref) + object_map[doc.id] = doc + else: + references = get_db()[col].find({'_id': {'$in': refs}}) + for ref in references: + if '_cls' in ref: + doc = get_document(ref["_cls"])._from_son(ref) + else: + doc = doc_type._from_son(ref) + object_map[doc.id] = doc + return object_map + + def _attach_objects(self, items, depth=0, instance=None, name=None): + """ + Recursively finds all db references to be dereferenced + + :param items: The iterable (dict, list, queryset) + :param depth: The current depth of recursion + :param instance: The owning instance used for 
tracking changes by + :class:`~esengine.base.ComplexBaseField` + :param name: The name of the field, used for tracking changes by + :class:`~esengine.base.ComplexBaseField` + """ + if not items: + if isinstance(items, (BaseDict, BaseList)): + return items + + if instance: + if isinstance(items, dict): + return BaseDict(items, instance, name) + else: + return BaseList(items, instance, name) + + if isinstance(items, (dict, SON)): + if '_ref' in items: + return self.object_map.get(items['_ref'].id, items) + elif '_types' in items and '_cls' in items: + doc = get_document(items['_cls'])._from_son(items) + doc._data = self._attach_objects(doc._data, depth, doc, name) + return doc + + if not hasattr(items, 'items'): + is_list = True + iterator = enumerate(items) + data = [] + else: + is_list = False + iterator = items.iteritems() + data = {} + + depth += 1 + for k, v in iterator: + if is_list: + data.append(v) + else: + data[k] = v + + if k in self.object_map: + data[k] = self.object_map[k] + elif hasattr(v, '_fields'): + for field_name, field in v._fields.iteritems(): + v = data[k]._data.get(field_name, None) + if isinstance(v, (DBRef)): + data[k]._data[field_name] = self.object_map.get(v.id, v) + elif isinstance(v, (dict, SON)) and '_ref' in v: + data[k]._data[field_name] = self.object_map.get(v['_ref'].id, v) + elif isinstance(v, dict) and depth <= self.max_depth: + data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name) + elif isinstance(v, (list, tuple)) and depth <= self.max_depth: + data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name) + elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: + data[k] = self._attach_objects(v, depth - 1, instance=instance, name=name) + elif hasattr(v, 'id'): + data[k] = self.object_map.get(v.id, v) + + if instance and name: + if is_list: + return BaseList(data, instance, name) + return BaseDict(data, instance, name) + depth += 1 + return data diff 
class EmbeddedDocument(BaseDocument):
    """A :class:`~esengine.Document` that isn't stored in its own
    collection. :class:`~esengine.EmbeddedDocument`\ s should be used as
    fields on :class:`~esengine.Document`\ s through the
    :class:`~esengine.EmbeddedDocumentField` field type.
    """

    __metaclass__ = DocumentMetaclass

    def __init__(self, *args, **kwargs):
        super(EmbeddedDocument, self).__init__(*args, **kwargs)
        # Start with a clean change log; parents track deltas through it.
        self._changed_fields = []

    def __delattr__(self, *args, **kwargs):
        """Handle deletions of fields"""
        target = args[0]
        if target not in self._fields:
            # Not a declared field: fall back to normal attribute deletion.
            super(EmbeddedDocument, self).__delattr__(*args, **kwargs)
            return
        # Deleting a declared field resets it to its default value
        # (calling the default if it is a factory) rather than removing it.
        fallback = self._fields[target].default
        if callable(fallback):
            fallback = fallback()
        setattr(self, target, fallback)
+ + By default, the ElasticSearch collection used to store documents created using a + :class:`~esengine.Document` subclass will be the name of the subclass + converted to lowercase. A different collection may be specified by + providing :attr:`collection` to the :attr:`meta` dictionary in the class + definition. + + A :class:`~esengine.Document` subclass may be itself subclassed, to + create a specialised version of the document that will be stored in the + same collection. To facilitate this behaviour, `_cls` and `_types` + fields are added to documents (hidden though the ESEngine interface + though). To disable this behaviour and remove the dependence on the + presence of `_cls` and `_types`, set :attr:`allow_inheritance` to + ``False`` in the :attr:`meta` dictionary. + + A :class:`~esengine.Document` may use a **Capped Collection** by + specifying :attr:`max_documents` and :attr:`max_size` in the :attr:`meta` + dictionary. :attr:`max_documents` is the maximum number of documents that + is allowed to be stored in the collection, and :attr:`max_size` is the + maximum size of the collection in bytes. If :attr:`max_size` is not + specified and :attr:`max_documents` is, :attr:`max_size` defaults to + 10000000 bytes (10MB). + + Indexes may be created by specifying :attr:`indexes` in the :attr:`meta` + dictionary. The value should be a list of field names or tuples of field + names. Index direction may be specified by prefixing the field names with + a **+** or **-** sign. + + By default, _types will be added to the start of every index (that + doesn't contain a list) if allow_inheritence is True. This can be + disabled by either setting types to False on the specific index or + by setting index_types to False on the meta dictionary for the document. 
+ """ + __metaclass__ = TopLevelDocumentMetaclass + + @apply + def pk(): + """Primary key alias + """ + def fget(self): + return getattr(self, self._meta['id_field']) + def fset(self, value): + return setattr(self, self._meta['id_field'], value) + return property(fget, fset) + + @classmethod + def _get_db(cls): + """Some Model using other db_alias""" + return get_db(cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME )) + + def save(self, safe=True, force_insert=False, validate=True, bulk=False, + cascade=None, cascade_kwargs=None, _refs=None): + """Save the :class:`~esengine.Document` to the database. If the + document already exists, it will be updated, otherwise it will be + created. + + If ``safe=True`` and the operation is unsuccessful, an + :class:`~esengine.OperationError` will be raised. + + :param safe: check if the operation succeeded before returning + :param force_insert: only try to create a new document, don't allow + updates of existing documents + :param validate: validates the document; set to ``False`` to skip. + :param cascade: Sets the flag for cascading saves. 
You can set a default by setting + "cascade" in the document __meta__ + :param cascade_kwargs: optional kwargs dictionary to be passed throw to cascading saves + :param _refs: A list of processed references used in cascading saves + + """ + signals.pre_save.send(self.__class__, document=self) + + if validate: + self.validate() + es = self._get_db() + doc = self.to_es() + doc._meta.connection =es + doc._meta.index =doc._meta.connection.default_indices[0] + #doc.save(force_insert=force_insert, bulk=bulk) + doc.save(bulk=bulk) + if not bulk: + es.dirty = True + id_field = self._meta['id_field'] + self[id_field] = self._fields[id_field].to_python(doc._meta.id) + + self._changed_fields = [] + self._created = False + signals.post_save.send(self.__class__, document=self, created=doc._meta.version==1) + #TODO: add signals in bulk create + + def cascade_save(self, *args, **kwargs): + """Recursively saves any references / generic references on an object""" + from fields import ReferenceField, GenericReferenceField + _refs = kwargs.get('_refs', []) or [] + for name, cls in self._fields.items(): + if not isinstance(cls, (ReferenceField, GenericReferenceField)): + continue + ref = getattr(self, name) + if not ref: + continue + ref_id = "%s,%s" % (ref.__class__.__name__, str(ref._data)) + if ref and ref_id not in _refs: + _refs.append(ref_id) + kwargs["_refs"] = _refs + ref.save(**kwargs) + ref._changed_fields = [] + + def update(self, **kwargs): + """Performs an update on the :class:`~esengine.Document` + A convenience wrapper to :meth:`~esengine.QuerySet.update`. + + Raises :class:`OperationError` if called on an object that has not yet + been saved. 
+ """ + if not self.pk: + raise OperationError('attempt to update a document not yet saved') + + # Need to add shard key to query, or you get an error + select_dict = {'pk': self.pk} + shard_key = self.__class__._meta.get('shard_key', tuple()) + for k in shard_key: + select_dict[k] = getattr(self, k) + return self.__class__.objects(**select_dict).update_one(**kwargs) + + def delete(self, safe=False): + """Delete the :class:`~esengine.Document` from the database. This + will only take effect if the document has been previously saved. + + :param safe: check if the operation succeeded before returning + """ + signals.pre_delete.send(self.__class__, document=self) + + try: + self.__class__.objects(pk=self.pk).delete(safe=safe) + except pymongo.errors.OperationFailure, err: + message = u'Could not delete document (%s)' % err.message + raise OperationError(message) + + signals.post_delete.send(self.__class__, document=self) + + def select_related(self, max_depth=1): + """Handles dereferencing of :class:`~bson.dbref.DBRef` objects to + a maximum depth in order to cut down the number queries to mongodb. + + .. versionadded:: 0.5 + """ + from dereference import DeReference + self._data = DeReference()(self._data, max_depth) + return self + + def reload(self, max_depth=1): + """Reloads all attributes from the database. + + .. versionadded:: 0.1.2 + .. versionchanged:: 0.6 Now chainable + """ + id_field = self._meta['id_field'] + obj = self.__class__.objects( + **{id_field: self[id_field]} + ).first().select_related(max_depth=max_depth) + for field in self._fields: + setattr(self, field, self._reload(field, obj[field])) + if self._dynamic: + for name in self._dynamic_fields.keys(): + setattr(self, name, self._reload(name, obj._data[name])) + self._changed_fields = obj._changed_fields + return obj + + def _reload(self, key, value): + """Used by :meth:`~esengine.Document.reload` to ensure the + correct instance is linked to self. 
+ """ + if isinstance(value, BaseDict): + value = [(k, self._reload(k, v)) for k, v in value.items()] + value = BaseDict(value, self, key) + elif isinstance(value, BaseList): + value = [self._reload(key, v) for v in value] + value = BaseList(value, self, key) + elif isinstance(value, (EmbeddedDocument, DynamicEmbeddedDocument)): + value._changed_fields = [] + return value + + def to_dbref(self): + """Returns an instance of :class:`~bson.dbref.DBRef` useful in + `__raw__` queries.""" + if not self.pk: + msg = "Only saved documents can have a valid dbref" + raise OperationError(msg) + return DBRef(self.__class__._get_collection_name(), self.pk) + + @classmethod + def register_delete_rule(cls, document_cls, field_name, rule): + """This method registers the delete rules to apply when removing this + object. + """ + cls._meta['delete_rules'][(document_cls, field_name)] = rule + + @classmethod + def drop_collection(cls): + """Drops the entire collection associated with this + :class:`~esengine.Document` type from the database. + """ + from esengine.queryset import QuerySet + db = cls._get_db() + db.delete_mapping(db._default_indices[0], cls._get_collection_name()) + QuerySet._reset_already_indexed(cls) + + +class DynamicDocument(Document): + """A Dynamic Document class allowing flexible, expandable and uncontrolled + schemas. As a :class:`~esengine.Document` subclass, acts in the same + way as an ordinary document but has expando style properties. Any data + passed or set against the :class:`~esengine.DynamicDocument` that is + not a field is automatically converted into a + :class:`~esengine.BaseDynamicField` and data can be attributed to that + field. 
+ + ..note:: + + There is one caveat on Dynamic Documents: fields cannot start with `_` + """ + __metaclass__ = TopLevelDocumentMetaclass + _dynamic = True + + def __delattr__(self, *args, **kwargs): + """Deletes the attribute by setting to None and allowing _delta to unset + it""" + field_name = args[0] + if field_name in self._dynamic_fields: + setattr(self, field_name, None) + else: + super(DynamicDocument, self).__delattr__(*args, **kwargs) + + +class DynamicEmbeddedDocument(EmbeddedDocument): + """A Dynamic Embedded Document class allowing flexible, expandable and + uncontrolled schemas. See :class:`~esengine.DynamicDocument` for more + information about dynamic documents. + """ + + __metaclass__ = DocumentMetaclass + _dynamic = True + + def __delattr__(self, *args, **kwargs): + """Deletes the attribute by setting to None and allowing _delta to unset + it""" + field_name = args[0] + setattr(self, field_name, None) + diff --git a/src/archivematicaCommon/lib/externals/pyes/engine/fields.py b/src/archivematicaCommon/lib/externals/pyes/engine/fields.py new file mode 100644 index 0000000000..c3ca27e7c3 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/engine/fields.py @@ -0,0 +1,1122 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import datetime +import time +import decimal +import re +import uuid +from base import (BaseField, ComplexBaseField, ValidationError, + get_document, BaseDocument) +from queryset import DO_NOTHING, QuerySet +from document import Document, EmbeddedDocument +from connection import get_db, DEFAULT_CONNECTION_NAME +from operator import itemgetter + + +try: + from PIL import Image, ImageOps +except ImportError: + Image = None + ImageOps = None + +try: + from cStringIO import StringIO +except ImportError: + from StringIO import StringIO + + +__all__ = ['StringField', 'IntField', 'FloatField', 'BooleanField', + 'DateTimeField', 'EmbeddedDocumentField', 'ListField', 'DictField', + #'ObjectIdField', + 'ReferenceField', 
'ValidationError', 'MapField', + 'DecimalField', 'ComplexDateTimeField', 'URLField', + 'GenericReferenceField', + 'FileField', 'BinaryField', + 'SortedListField', 'EmailField', 'GeoPointField', + 'ImageField', + 'SequenceField', 'UUIDField', 'GenericEmbeddedDocumentField'] + +RECURSIVE_REFERENCE_CONSTANT = 'self' + + +class StringField(BaseField): + """A unicode string field. + """ + + def __init__(self, regex=None, max_length=None, min_length=None, **kwargs): + self.regex = re.compile(regex) if regex else None + self.max_length = max_length + self.min_length = min_length + super(StringField, self).__init__(**kwargs) + + def to_python(self, value): + return unicode(value) + + def validate(self, value): + if not isinstance(value, (str, unicode)): + self.error('StringField only accepts string values') + + if self.max_length is not None and len(value) > self.max_length: + self.error('String value is too long') + + if self.min_length is not None and len(value) < self.min_length: + self.error('String value is too short') + + if self.regex is not None and self.regex.match(value) is None: + self.error('String value did not match validation regex') + + def lookup_member(self, member_name): + return None + + def prepare_query_value(self, op, value): + if not isinstance(op, basestring): + return value + + if op.lstrip('i') in ('startswith', 'endswith', 'contains', 'exact'): + flags = 0 + if op.startswith('i'): + flags = re.IGNORECASE + op = op.lstrip('i') + + regex = r'%s' + if op == 'startswith': + regex = r'^%s' + elif op == 'endswith': + regex = r'%s$' + elif op == 'exact': + regex = r'^%s$' + + # escape unsafe characters which could lead to a re.error + value = re.escape(value) + value = re.compile(regex % value, flags) + return value + + +class URLField(StringField): + """A field that validates input as an URL. + + .. 
versionadded:: 0.3 + """ + + URL_REGEX = re.compile( + r'^https?://' + r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+[A-Z]{2,6}\.?|' + r'localhost|' + r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' + r'(?::\d+)?' + r'(?:/?|[/?]\S+)$', re.IGNORECASE + ) + + def __init__(self, verify_exists=False, **kwargs): + self.verify_exists = verify_exists + super(URLField, self).__init__(**kwargs) + + def validate(self, value): + if not URLField.URL_REGEX.match(value): + self.error('Invalid URL: %s' % value) + + if self.verify_exists: + import urllib2 + try: + request = urllib2.Request(value) + urllib2.urlopen(request) + except Exception, e: + self.error('This URL appears to be a broken link: %s' % e) + + +class EmailField(StringField): + """A field that validates input as an E-Mail-Address. + + .. versionadded:: 0.4 + """ + + EMAIL_REGEX = re.compile( + r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*" # dot-atom + r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|\\[\001-011\013\014\016-\177])*"' # quoted-string + r')@(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+[A-Z]{2,6}\.?$', re.IGNORECASE # domain + ) + + def validate(self, value): + if not EmailField.EMAIL_REGEX.match(value): + self.error('Invalid Mail-address: %s' % value) + + +class IntField(BaseField): + """An integer field. + """ + + def __init__(self, min_value=None, max_value=None, **kwargs): + self.min_value, self.max_value = min_value, max_value + super(IntField, self).__init__(**kwargs) + + def to_python(self, value): + return int(value) + + def validate(self, value): + try: + value = int(value) + except: + self.error('%s could not be converted to int' % value) + + if self.min_value is not None and value < self.min_value: + self.error('Integer value is too small') + + if self.max_value is not None and value > self.max_value: + self.error('Integer value is too large') + + def prepare_query_value(self, op, value): + return int(value) + + +class FloatField(BaseField): + """An floating point number field. 
+ """ + + def __init__(self, min_value=None, max_value=None, **kwargs): + self.min_value, self.max_value = min_value, max_value + super(FloatField, self).__init__(**kwargs) + + def to_python(self, value): + return float(value) + + def validate(self, value): + if isinstance(value, int): + value = float(value) + if not isinstance(value, float): + self.error('FoatField only accepts float values') + + if self.min_value is not None and value < self.min_value: + self.error('Float value is too small') + + if self.max_value is not None and value > self.max_value: + self.error('Float value is too large') + + def prepare_query_value(self, op, value): + return float(value) + + +class DecimalField(BaseField): + """A fixed-point decimal number field. + + .. versionadded:: 0.3 + """ + + def __init__(self, min_value=None, max_value=None, **kwargs): + self.min_value, self.max_value = min_value, max_value + super(DecimalField, self).__init__(**kwargs) + + def to_python(self, value): + if not isinstance(value, basestring): + value = unicode(value) + return decimal.Decimal(value) + + def to_es(self, value): + return unicode(value) + + def validate(self, value): + if not isinstance(value, decimal.Decimal): + if not isinstance(value, basestring): + value = str(value) + try: + value = decimal.Decimal(value) + except Exception, exc: + self.error('Could not convert value to decimal: %s' % exc) + + if self.min_value is not None and value < self.min_value: + self.error('Decimal value is too small') + + if self.max_value is not None and value > self.max_value: + self.error('Decimal value is too large') + + +class BooleanField(BaseField): + """A boolean field type. + + .. versionadded:: 0.1.2 + """ + + def to_python(self, value): + return bool(value) + + def validate(self, value): + if not isinstance(value, bool): + self.error('BooleanField only accepts boolean values') + + +class DateTimeField(BaseField): + """A datetime field. + + Note: Microseconds are rounded to the nearest millisecond. 
+ Pre UTC microsecond support is effecively broken. + Use :class:`~esengine.fields.ComplexDateTimeField` if you + need accurate microsecond support. + """ + + def validate(self, value): + if not isinstance(value, (datetime.datetime, datetime.date)): + self.error(u'cannot parse date "%s"' % value) + + def to_es(self, value): + return self.prepare_query_value(None, value) + + def prepare_query_value(self, op, value): + if value is None: + return value + if isinstance(value, datetime.datetime): + return value + if isinstance(value, datetime.date): + return datetime.datetime(value.year, value.month, value.day) + + # Attempt to parse a datetime: + # value = smart_str(value) + # split usecs, because they are not recognized by strptime. + if '.' in value: + try: + value, usecs = value.split('.') + usecs = int(usecs) + except ValueError: + return None + else: + usecs = 0 + kwargs = {'microsecond': usecs} + try: # Seconds are optional, so try converting seconds first. + return datetime.datetime(*time.strptime(value, '%Y-%m-%d %H:%M:%S')[:6], + **kwargs) + except ValueError: + try: # Try without seconds. + return datetime.datetime(*time.strptime(value, '%Y-%m-%d %H:%M')[:5], + **kwargs) + except ValueError: # Try without hour/minutes/seconds. + try: + return datetime.datetime(*time.strptime(value, '%Y-%m-%d')[:3], + **kwargs) + except ValueError: + return None + + +class ComplexDateTimeField(StringField): + """ + ComplexDateTimeField handles microseconds exactly instead of rounding + like DateTimeField does. + + Derives from a StringField so you can do `gte` and `lte` filtering by + using lexicographical comparison when filtering / sorting strings. + + The stored string has the following format: + + YYYY,MM,DD,HH,MM,SS,NNNNNN + + Where NNNNNN is the number of microseconds of the represented `datetime`. + The `,` as the separator can be easily modified by passing the `separator` + keyword when initializing the field. + + .. 
versionadded:: 0.5 + """ + + def __init__(self, separator=',', **kwargs): + self.names = ['year', 'month', 'day', 'hour', 'minute', 'second', + 'microsecond'] + self.separtor = separator + super(ComplexDateTimeField, self).__init__(**kwargs) + + def _leading_zero(self, number): + """ + Converts the given number to a string. + + If it has only one digit, a leading zero so as it has always at least + two digits. + """ + if int(number) < 10: + return "0%s" % number + else: + return str(number) + + def _convert_from_datetime(self, val): + """ + Convert a `datetime` object to a string representation (which will be + stored in ElasticSearch). This is the reverse function of + `_convert_from_string`. + + >>> a = datetime(2011, 6, 8, 20, 26, 24, 192284) + >>> RealDateTimeField()._convert_from_datetime(a) + '2011,06,08,20,26,24,192284' + """ + data = [] + for name in self.names: + data.append(self._leading_zero(getattr(val, name))) + return ','.join(data) + + def _convert_from_string(self, data): + """ + Convert a string representation to a `datetime` object (the object you + will manipulate). This is the reverse function of + `_convert_from_datetime`. 
+ + >>> a = '2011,06,08,20,26,24,192284' + >>> ComplexDateTimeField()._convert_from_string(a) + datetime.datetime(2011, 6, 8, 20, 26, 24, 192284) + """ + data = data.split(',') + data = map(int, data) + values = {} + for i in range(7): + values[self.names[i]] = data[i] + return datetime.datetime(**values) + + def __get__(self, instance, owner): + data = super(ComplexDateTimeField, self).__get__(instance, owner) + if data is None: + return datetime.datetime.now() + return self._convert_from_string(data) + + def __set__(self, instance, value): + value = self._convert_from_datetime(value) + return super(ComplexDateTimeField, self).__set__(instance, value) + + def validate(self, value): + if not isinstance(value, datetime.datetime): + self.error('Only datetime objects may used in a ' + 'ComplexDateTimeField') + + def to_python(self, value): + return self._convert_from_string(value) + + def to_es(self, value): + return self._convert_from_datetime(value) + + def prepare_query_value(self, op, value): + return self._convert_from_datetime(value) + + +class EmbeddedDocumentField(BaseField): + """An embedded document field - with a declared document_type. + Only valid values are subclasses of :class:`~esengine.EmbeddedDocument`. 
+ """ + + def __init__(self, document_type, **kwargs): + if not isinstance(document_type, basestring): + if not issubclass(document_type, EmbeddedDocument): + self.error('Invalid embedded document class provided to an ' + 'EmbeddedDocumentField') + self.document_type_obj = document_type + super(EmbeddedDocumentField, self).__init__(**kwargs) + + @property + def document_type(self): + if isinstance(self.document_type_obj, basestring): + if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT: + self.document_type_obj = self.owner_document + else: + self.document_type_obj = get_document(self.document_type_obj) + return self.document_type_obj + + def to_python(self, value): + if not isinstance(value, self.document_type): + return self.document_type._from_son(value) + return value + + def to_es(self, value): + if not isinstance(value, self.document_type): + return value + return self.document_type.to_es(value) + + def validate(self, value): + """Make sure that the document instance is an instance of the + EmbeddedDocument subclass provided when the document was defined. + """ + # Using isinstance also works for subclasses of self.document + if not isinstance(value, self.document_type): + self.error('Invalid embedded document instance provided to an ' + 'EmbeddedDocumentField') + self.document_type.validate(value) + + def lookup_member(self, member_name): + return self.document_type._fields.get(member_name) + + def prepare_query_value(self, op, value): + return self.to_es(value) + + +class GenericEmbeddedDocumentField(BaseField): + """A generic embedded document field - allows any + :class:`~esengine.EmbeddedDocument` to be stored. + + Only valid values are subclasses of :class:`~esengine.EmbeddedDocument`. 
+ """ + + def prepare_query_value(self, op, value): + return self.to_es(value) + + def to_python(self, value): + if isinstance(value, dict): + doc_cls = get_document(value['_cls']) + value = doc_cls._from_son(value) + + return value + + def validate(self, value): + if not isinstance(value, EmbeddedDocument): + self.error('Invalid embedded document instance provided to an ' + 'GenericEmbeddedDocumentField') + + value.validate() + + def to_es(self, document): + if document is None: + return None + + data = document.to_es() + if not '_cls' in data: + data['_cls'] = document._class_name + return data + + +class ListField(ComplexBaseField): + """A list field that wraps a standard field, allowing multiple instances + of the field to be used as a list in the database. + + .. note:: + Required means it cannot be empty - as the default for ListFields is [] + """ + + # ListFields cannot be indexed with _types - ElasticSearch doesn't support this + _index_with_types = False + + def __init__(self, field=None, **kwargs): + self.field = field + kwargs.setdefault('default', lambda: []) + super(ListField, self).__init__(**kwargs) + + def validate(self, value): + """Make sure that a list of valid fields is being used. + """ + if (not isinstance(value, (list, tuple, QuerySet)) or + isinstance(value, basestring)): + self.error('Only lists and tuples may be used in a list field') + super(ListField, self).validate(value) + + def prepare_query_value(self, op, value): + if self.field: + if op in ('set', 'unset') and (not isinstance(value, basestring) + and not isinstance(value, BaseDocument) + and hasattr(value, '__iter__')): + return [self.field.prepare_query_value(op, v) for v in value] + return self.field.prepare_query_value(op, value) + return super(ListField, self).prepare_query_value(op, value) + + +class SortedListField(ListField): + """A ListField that sorts the contents of its list before writing to + the database in order to ensure that a sorted list is always + retrieved. 
+ + .. warning:: + There is a potential race condition when handling lists. If you set / + save the whole list then other processes trying to save the whole list + as well could overwrite changes. The safest way to append to a list is + to perform a push operation. + + .. versionadded:: 0.4 + .. versionchanged:: 0.6 - added reverse keyword + """ + + _ordering = None + _order_reverse = False + + def __init__(self, field, **kwargs): + if 'ordering' in kwargs.keys(): + self._ordering = kwargs.pop('ordering') + if 'reverse' in kwargs.keys(): + self._order_reverse = kwargs.pop('reverse') + super(SortedListField, self).__init__(field, **kwargs) + + def to_es(self, value): + value = super(SortedListField, self).to_es(value) + if self._ordering is not None: + return sorted(value, key=itemgetter(self._ordering), reverse=self._order_reverse) + return sorted(value, reverse=self._order_reverse) + + +class DictField(ComplexBaseField): + """A dictionary field that wraps a standard Python dictionary. This is + similar to an embedded document, but the structure is not defined. + + .. note:: + Required means it cannot be empty - as the default for ListFields is [] + + .. versionadded:: 0.3 + .. versionchanged:: 0.5 - Can now handle complex / varying types of data + """ + + def __init__(self, basecls=None, field=None, *args, **kwargs): + self.field = field + self.basecls = basecls or BaseField + if not issubclass(self.basecls, BaseField): + self.error('DictField only accepts dict values') + kwargs.setdefault('default', lambda: {}) + super(DictField, self).__init__(*args, **kwargs) + + def validate(self, value): + """Make sure that a list of valid fields is being used. + """ + if not isinstance(value, dict): + self.error('Only dictionaries may be used in a DictField') + + if any(k for k in value.keys() if not isinstance(k, basestring)): + self.error('Invalid dictionary key - documents must have only string keys') + if any(('.' 
in k or '$' in k) for k in value.keys()): + self.error('Invalid dictionary key name - keys may not contain "."' + ' or "$" characters') + super(DictField, self).validate(value) + + def lookup_member(self, member_name): + return DictField(basecls=self.basecls, db_field=member_name) + + def prepare_query_value(self, op, value): + match_operators = ['contains', 'icontains', 'startswith', + 'istartswith', 'endswith', 'iendswith', + 'exact', 'iexact'] + + if op in match_operators and isinstance(value, basestring): + return StringField().prepare_query_value(op, value) + + return super(DictField, self).prepare_query_value(op, value) + + +class MapField(DictField): + """A field that maps a name to a specified field type. Similar to + a DictField, except the 'value' of each item must match the specified + field type. + + .. versionadded:: 0.5 + """ + + def __init__(self, field=None, *args, **kwargs): + if not isinstance(field, BaseField): + self.error('Argument to MapField constructor must be a valid ' + 'field') + super(MapField, self).__init__(field=field, *args, **kwargs) + + +class ReferenceField(BaseField): + """A reference to a document that will be automatically dereferenced on + access (lazily). + + Use the `reverse_delete_rule` to handle what should happen if the document + the field is referencing is deleted. EmbeddedDocuments, DictFields and + MapFields do not support reverse_delete_rules and an `InvalidDocumentError` + will be raised if trying to set on one of these Document / Field types. + + The options are: + + * DO_NOTHING - don't do anything (default). + * NULLIFY - Updates the reference to null. + * CASCADE - Deletes the documents associated with the reference. + * DENY - Prevent the deletion of the reference object. + + .. versionchanged:: 0.5 added `reverse_delete_rule` + """ + + def __init__(self, document_type, reverse_delete_rule=DO_NOTHING, **kwargs): + """Initialises the Reference Field. 
+ + :param reverse_delete_rule: Determines what to do when the referring + object is deleted + """ + if not isinstance(document_type, basestring): + if not issubclass(document_type, (Document, basestring)): + self.error('Argument to ReferenceField constructor must be a ' + 'document class or a string') + self.document_type_obj = document_type + self.reverse_delete_rule = reverse_delete_rule + super(ReferenceField, self).__init__(**kwargs) + + @property + def document_type(self): + if isinstance(self.document_type_obj, basestring): + if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT: + self.document_type_obj = self.owner_document + else: + self.document_type_obj = get_document(self.document_type_obj) + return self.document_type_obj + + def __get__(self, instance, owner): + """Descriptor to allow lazy dereferencing. + """ + if instance is None: + # Document class being used rather than a document object + return self + + # Get value from document instance if available + value = instance._data.get(self.name) +# # Dereference DBRefs +# if isinstance(value, (DBRef)): +# value = self.document_type._get_db().dereference(value) +# if value is not None: +# instance._data[self.name] = self.document_type._from_son(value) + + return super(ReferenceField, self).__get__(instance, owner) + + def to_es(self, document): + id_field_name = self.document_type._meta['id_field'] + id_field = self.document_type._fields[id_field_name] + + if isinstance(document, Document): + # We need the id from the saved object to create the DBRef + id_ = document.id + if id_ is None: + self.error('You can only reference documents once they have' + ' been saved to the database') + else: + id_ = document + + id_ = id_field.to_es(id_) + collection = self.document_type._get_collection_name() + return DBRef(collection, id_) + + def prepare_query_value(self, op, value): + if value is None: + return None + + return self.to_es(value) + + def validate(self, value): + + if isinstance(value, Document) and 
value.id is None: + self.error('You can only reference documents once they have been ' + 'saved to the database') + + def lookup_member(self, member_name): + return self.document_type._fields.get(member_name) + + +class GenericReferenceField(BaseField): + """A reference to *any* :class:`~esengine.document.Document` subclass + that will be automatically dereferenced on access (lazily). + + ..note :: Any documents used as a generic reference must be registered in the + document registry. Importing the model will automatically register it. + + .. versionadded:: 0.3 + """ + + def __get__(self, instance, owner): + if instance is None: + return self + + value = instance._data.get(self.name) + if isinstance(value, dict): + instance._data[self.name] = self.dereference(value) + + return super(GenericReferenceField, self).__get__(instance, owner) + + def validate(self, value): + if not isinstance(value, (Document, DBRef)): + self.error('GenericReferences can only contain documents') + + # We need the id from the saved object to create the DBRef + if isinstance(value, Document) and value.id is None: + self.error('You can only reference documents once they have been' + ' saved to the database') + + def dereference(self, value): + doc_cls = get_document(value['_cls']) + reference = value['_ref'] + doc = doc_cls._get_db().dereference(reference) + if doc is not None: + doc = doc_cls._from_son(doc) + return doc + + def to_es(self, document): + if document is None: + return None + + id_field_name = document.__class__._meta['id_field'] + id_field = document.__class__._fields[id_field_name] + + if isinstance(document, Document): + # We need the id from the saved object to create the DBRef + id_ = document.id + if id_ is None: + self.error('You can only reference documents once they have' + ' been saved to the database') + else: + id_ = document + + id_ = id_field.to_es(id_) + collection = document._get_collection_name() + ref = DBRef(collection, id_) + return {'_cls': 
document._class_name, '_ref': ref} + + def prepare_query_value(self, op, value): + if value is None: + return None + + return self.to_es(value) + + +class BinaryField(BaseField): + """A binary data field. + """ + + def __init__(self, max_bytes=None, **kwargs): + self.max_bytes = max_bytes + super(BinaryField, self).__init__(**kwargs) + + def to_es(self, value): + return Binary(value) + + def to_python(self, value): + # Returns str not unicode as this is binary data + return str(value) + + def validate(self, value): + if not isinstance(value, str): + self.error('BinaryField only accepts string values') + + if self.max_bytes is not None and len(value) > self.max_bytes: + self.error('Binary value is too long') + + +class FileField(BaseField): + """A GridFS storage field. + + .. versionadded:: 0.4 + .. versionchanged:: 0.5 added optional size param for read + .. versionchanged:: 0.6 added db_alias for multidb support + """ + + def __init__(self, + db_alias=DEFAULT_CONNECTION_NAME, + collection_name="fs", **kwargs): + super(FileField, self).__init__(**kwargs) + self.collection_name = collection_name + self.db_alias = db_alias + + def __get__(self, instance, owner): + if instance is None: + return self + + # Check if a file already exists for this model + grid_file = instance._data.get(self.name) + self.grid_file = grid_file + if isinstance(self.grid_file, self.proxy_class): + if not self.grid_file.key: + self.grid_file.key = self.name + self.grid_file.instance = instance + return self.grid_file + return self.proxy_class(key=self.name, instance=instance, + db_alias=self.db_alias, + collection_name=self.collection_name) + + def __set__(self, instance, value): + key = self.name + if isinstance(value, file) or isinstance(value, str): + # using "FileField() = file/string" notation + grid_file = instance._data.get(self.name) + # If a file already exists, delete it + if grid_file: + try: + grid_file.delete() + except: + pass + # Create a new file with the new data + 
grid_file.put(value) + else: + # Create a new proxy object as we don't already have one + instance._data[key] = self.proxy_class(key=key, instance=instance, + collection_name=self.collection_name) + instance._data[key].put(value) + else: + instance._data[key] = value + + instance._mark_as_changed(key) + + def to_es(self, value): + # Store the GridFS file id in ElasticSearch + if isinstance(value, self.proxy_class) and value.grid_id is not None: + return value.grid_id + return None + + def to_python(self, value): + if value is not None: + return self.proxy_class(value, + collection_name=self.collection_name, + db_alias=self.db_alias) + + def validate(self, value): + if value.grid_id is not None: + if not isinstance(value, self.proxy_class): + self.error('FileField only accepts GridFSProxy values') + if not isinstance(value.grid_id, ObjectId): + self.error('Invalid GridFSProxy value') + + +class ImageGridFsProxy(BaseField): + """ + Proxy for ImageField + + versionadded: 0.6 + """ + def put(self, file_obj, **kwargs): + """ + Insert a image in database + applying field properties (size, thumbnail_size) + """ + field = self.instance._fields[self.key] + + try: + img = Image.open(file_obj) + except: + raise ValidationError('Invalid image') + + if (field.size and (img.size[0] > field.size['width'] or + img.size[1] > field.size['height'])): + size = field.size + + if size['force']: + img = ImageOps.fit(img, + (size['width'], + size['height']), + Image.ANTIALIAS) + else: + img.thumbnail((size['width'], + size['height']), + Image.ANTIALIAS) + + thumbnail = None + if field.thumbnail_size: + size = field.thumbnail_size + + if size['force']: + thumbnail = ImageOps.fit(img, + (size['width'], + size['height']), + Image.ANTIALIAS) + else: + thumbnail = img.copy() + thumbnail.thumbnail((size['width'], + size['height']), + Image.ANTIALIAS) + + if thumbnail: + thumb_id = self._put_thumbnail(thumbnail, + img.format) + else: + thumb_id = None + + w, h = img.size + + io = StringIO() + 
img.save(io, img.format) + io.seek(0) + + return super(ImageGridFsProxy, self).put(io, + width=w, + height=h, + format=img.format, + thumbnail_id=thumb_id, + **kwargs) + + def delete(self, *args, **kwargs): + #deletes thumbnail + out = self.get() + if out and out.thumbnail_id: + self.fs.delete(out.thumbnail_id) + + return super(ImageGridFsProxy, self).delete(*args, **kwargs) + + def _put_thumbnail(self, thumbnail, format, **kwargs): + w, h = thumbnail.size + + io = StringIO() + thumbnail.save(io, format) + io.seek(0) + + return self.fs.put(io, width=w, + height=h, + format=format, + **kwargs) + @property + def size(self): + """ + return a width, height of image + """ + out = self.get() + if out: + return out.width, out.height + + @property + def format(self): + """ + return format of image + ex: PNG, JPEG, GIF, etc + """ + out = self.get() + if out: + return out.format + + @property + def thumbnail(self): + """ + return a gridfs.grid_file.GridOut + representing a thumbnail of Image + """ + out = self.get() + if out and out.thumbnail_id: + return self.fs.get(out.thumbnail_id) + + def write(self, *args, **kwargs): + raise RuntimeError("Please use \"put\" method instead") + + def writelines(self, *args, **kwargs): + raise RuntimeError("Please use \"put\" method instead") + + +class ImproperlyConfigured(Exception): + pass + + +class ImageField(FileField): + """ + A Image File storage field. + + @size (width, height, force): + max size to store images, if larger will be automatically resized + ex: size=(800, 600, True) + + @thumbnail (width, height, force): + size to generate a thumbnail + + .. 
versionadded:: 0.6 + """ + proxy_class = ImageGridFsProxy + + def __init__(self, size=None, thumbnail_size=None, + collection_name='images', **kwargs): + if not Image: + raise ImproperlyConfigured("PIL library was not found") + + params_size = ('width', 'height', 'force') + extra_args = dict(size=size, thumbnail_size=thumbnail_size) + for att_name, att in extra_args.items(): + if att and (isinstance(att, tuple) or isinstance(att, list)): + setattr(self, att_name, dict( + map(None, params_size, att))) + else: + setattr(self, att_name, None) + + super(ImageField, self).__init__( + collection_name=collection_name, + **kwargs) + + +class GeoPointField(BaseField): + """A list storing a latitude and longitude. + + .. versionadded:: 0.4 + """ + + _geo_index = True + + def validate(self, value): + """Make sure that a geo-value is of type (x, y) + """ + if not isinstance(value, (list, tuple)): + self.error('GeoPointField can only accept tuples or lists ' + 'of (x, y)') + + if not len(value) == 2: + self.error('Value must be a two-dimensional point') + if (not isinstance(value[0], (float, int)) and + not isinstance(value[1], (float, int))): + self.error('Both values in point must be float or int') + + +class SequenceField(IntField): + """Provides a sequental counter (see http://www.mongodb.org/display/DOCS/Object+IDs#ObjectIDs-SequenceNumbers) + + .. note:: + + Although traditional databases often use increasing sequence + numbers for primary keys. In ElasticSearch, the preferred approach is to + use Object IDs instead. The concept is that in a very large + cluster of machines, it is easier to create an object ID than have + global, uniformly increasing sequence numbers. + + .. 
versionadded:: 0.5 + """ + def __init__(self, collection_name=None, db_alias = None, *args, **kwargs): + self.collection_name = collection_name or 'esengine.counters' + self.db_alias = db_alias or DEFAULT_CONNECTION_NAME + return super(SequenceField, self).__init__(*args, **kwargs) + + def generate_new_value(self): + """ + Generate and Increment the counter + """ + sequence_id = "{0}.{1}".format(self.owner_document._get_collection_name(), + self.name) + collection = get_db(alias = self.db_alias )[self.collection_name] + counter = collection.find_and_modify(query={"_id": sequence_id}, + update={"$inc": {"next": 1}}, + new=True, + upsert=True) + return counter['next'] + + def __get__(self, instance, owner): + + if instance is None: + return self + + if not instance._data: + return + + value = instance._data.get(self.name) + + if not value and instance._initialised: + value = self.generate_new_value() + instance._data[self.name] = value + instance._mark_as_changed(self.name) + + return value + + def __set__(self, instance, value): + + if value is None and instance._initialised: + value = self.generate_new_value() + + return super(SequenceField, self).__set__(instance, value) + + def to_python(self, value): + if value is None: + value = self.generate_new_value() + return value + + +class UUIDField(BaseField): + """A UUID field. + + .. 
versionadded:: 0.6 + """ + + def __init__(self, **kwargs): + super(UUIDField, self).__init__(**kwargs) + + def to_python(self, value): + if not isinstance(value, basestring): + value = unicode(value) + return uuid.UUID(value) + + def to_es(self, value): + return unicode(value) + + def validate(self, value): + if not isinstance(value, uuid.UUID): + if not isinstance(value, basestring): + value = str(value) + try: + value = uuid.UUID(value) + except Exception, exc: + self.error('Could not convert to UUID: %s' % exc) diff --git a/src/archivematicaCommon/lib/externals/pyes/engine/queryset.py b/src/archivematicaCommon/lib/externals/pyes/engine/queryset.py new file mode 100644 index 0000000000..9e54d5911d --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/engine/queryset.py @@ -0,0 +1,32 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +__author__ = 'alberto' +# Delete rules +DO_NOTHING = 0 +NULLIFY = 1 +CASCADE = 2 +DENY = 3 + + +class DoesNotExist(Exception): + pass + + +class MultipleObjectsReturned(Exception): + pass + + +class InvalidQueryError(Exception): + pass + + +class OperationError(Exception): + pass + +class QuerySet(object): + pass + +class QuerySetManager(object): + pass + diff --git a/src/archivematicaCommon/lib/externals/pyes/engine/signals.py b/src/archivematicaCommon/lib/externals/pyes/engine/signals.py new file mode 100644 index 0000000000..9a7a3f06c1 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/engine/signals.py @@ -0,0 +1,49 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# -*- coding: utf-8 -*- + +__all__ = ['pre_init', 'post_init', 'pre_save', 'post_save', + 'pre_delete', 'post_delete'] + +signals_available = False +try: + from blinker import Namespace + signals_available = True +except ImportError: + class Namespace(object): + def signal(self, name, doc=None): + return _FakeSignal(name, doc) + + class _FakeSignal(object): + """If blinker is unavailable, create a fake class with the same + interface that 
allows sending of signals but will fail with an + error on anything else. Instead of doing anything on send, it + will just ignore the arguments and do nothing instead. + """ + + def __init__(self, name, doc=None): + self.name = name + self.__doc__ = doc + + def _fail(self, *args, **kwargs): + raise RuntimeError('signalling support is unavailable ' + 'because the blinker library is ' + 'not installed.') + send = lambda *a, **kw: None + connect = disconnect = has_receivers_for = receivers_for = \ + temporarily_connected_to = _fail + del _fail + +# the namespace for code signals. If you are not esengine code, do +# not put signals in here. Create your own namespace instead. +_signals = Namespace() + +pre_init = _signals.signal('pre_init') +post_init = _signals.signal('post_init') +pre_save = _signals.signal('pre_save') +post_save = _signals.signal('post_save') +pre_delete = _signals.signal('pre_delete') +post_delete = _signals.signal('post_delete') +pre_bulk_insert = _signals.signal('pre_bulk_insert') +post_bulk_insert = _signals.signal('post_bulk_insert') diff --git a/src/archivematicaCommon/lib/externals/pyes/es.py b/src/archivematicaCommon/lib/externals/pyes/es.py new file mode 100644 index 0000000000..5284aecccc --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/es.py @@ -0,0 +1,1864 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +from __future__ import absolute_import +from __future__ import with_statement + +try: + # For Python >= 2.6 + import json +except ImportError: + # For Python < 2.6 or people using a newer version of simplejson + import simplejson as json + +import random +from datetime import date, datetime +from urllib import urlencode +from urlparse import urlunsplit +import base64 +import time +from decimal import Decimal +from urllib import quote +import threading +import copy +from urlparse import urlparse + +try: + from .connection import connect as thrift_connect + from .pyesthrift.ttypes import Method, RestRequest + + 
thrift_enable = True +except ImportError: + from .fakettypes import Method, RestRequest + + thrift_enable = False + +from .connection_http import connect as http_connect +from . import logger +from .mappings import Mapper + +from .convert_errors import raise_if_error +from .exceptions import (ElasticSearchException, IndexAlreadyExistsException, + IndexMissingException, InvalidQuery, + ReduceSearchPhaseException, VersionConflictEngineException, + BulkOperationException) + +__all__ = ['ES', 'file_to_attachment', 'decode_json'] + +# +# models +# + +class DotDict(dict): + def __getattr__(self, attr): + if attr.startswith('__'): + raise AttributeError + return self.get(attr, None) + + __setattr__ = dict.__setitem__ + + __delattr__ = dict.__delitem__ + + def __deepcopy__(self, memo): + return DotDict([(copy.deepcopy(k, memo), copy.deepcopy(v, memo)) for k, v in self.items()]) + +class ElasticSearchModel(DotDict): + def __init__(self, *args, **kwargs): + self._meta = DotDict() + self.__initialised = True + if len(args) == 2 and isinstance(args[0], ES): + item = args[1] + self.update(item.pop("_source", DotDict())) + self.update(item.pop("fields", {})) + self._meta = DotDict([(k.lstrip("_"), v) for k, v in item.items()]) + self._meta.parent = self._meta.pop("_parent", None) + self._meta.connection = args[0] + else: + self.update(dict(*args, **kwargs)) + + def __setattr__(self, key, value): + if not self.__dict__.has_key( + '_ElasticSearchModel__initialised'): # this test allows attributes to be set in the __init__ method + return dict.__setattr__(self, key, value) + elif self.__dict__.has_key(key): # any normal attributes are handled normally + dict.__setattr__(self, key, value) + else: + self.__setitem__(key, value) + + def get_meta(self): + return self._meta + + def delete(self, bulk=False): + """ + Delete the object + """ + meta = self._meta + conn = meta['connection'] + conn.delete(meta.index, meta.type, meta.id, bulk=bulk) + + def save(self, bulk=False, id=None, 
parent=None, force=False): + """ + Save the object and returns id + """ + meta = self._meta + conn = meta['connection'] + id = id or meta.get("id", None) + parent = parent or meta.get('parent', None) + version = meta.get('version', None) + if force: + version = None + res = conn.index(self, + meta.index, meta.type, id, parent=parent, bulk=bulk, version=version, force_insert=force) + if not bulk: + self._meta.id = res._id + self._meta.version = res._version + return res._id + return id + + def reload(self): + meta = self._meta + conn = meta['connection'] + res = conn.get(meta.index, meta.type, meta["id"]) + self.update(res) + + + def get_id(self): + """ Force the object saveing to get an id""" + _id = self._meta.get("id", None) + if _id is None: + _id = self.save() + return _id + + def get_bulk(self, create=False): + """Return bulk code""" + result = [] + op_type = "index" + if create: + op_type = "create" + meta = self._meta + cmd = {op_type: {"_index": meta.index, "_type": meta.type}} + if meta.parent: + cmd[op_type]['_parent'] = meta.parent + if meta.version: + cmd[op_type]['_version'] = meta.version + if meta.id: + cmd[op_type]['_id'] = meta.id + result.append(json.dumps(cmd, cls=self._meta.connection.encoder)) + result.append("\n") + result.append(json.dumps(self, cls=self._meta.connection.encoder)) + result.append("\n") + return ''.join(result) + + +def file_to_attachment(filename, filehandler=None): + """ + Convert a file to attachment + """ + if filehandler: + return {'_name': filename, + 'content': base64.b64encode(filehandler.read()) + } + with open(filename, 'rb') as _file: + return {'_name': filename, + 'content': base64.b64encode(_file.read()) + } + + +def _is_bulk_item_ok(item): + if "index" in item: + return "ok" in item["index"] + elif "delete" in item: + return "ok" in item["delete"] + else: + # unknown response type; be conservative + return False + + +def _raise_exception_if_bulk_item_failed(bulk_result): + errors = [item for item in 
bulk_result["items"] if not _is_bulk_item_ok(item)] + if len(errors) > 0: + raise BulkOperationException(errors, bulk_result) + return None + + +class ESJsonEncoder(json.JSONEncoder): + def default(self, value): + """Convert rogue and mysterious data types. + Conversion notes: + + - ``datetime.date`` and ``datetime.datetime`` objects are + converted into datetime strings. + """ + + if isinstance(value, datetime): + return value.isoformat() + elif isinstance(value, date): + dt = datetime(value.year, value.month, value.day, 0, 0, 0) + return dt.isoformat() + elif isinstance(value, Decimal): + return float(str(value)) + else: + # use no special encoding and hope for the best + return value + + +class ESJsonDecoder(json.JSONDecoder): + def __init__(self, *args, **kwargs): + kwargs['object_hook'] = self.dict_to_object + json.JSONDecoder.__init__(self, *args, **kwargs) + + def string_to_datetime(self, obj): + """Decode a datetime string to a datetime object + """ + if isinstance(obj, basestring): + try: + return datetime(*time.strptime(obj, "%Y-%m-%dT%H:%M:%S")[:6]) + except ValueError: + pass + return obj + + def dict_to_object(self, d): + """ + Decode datetime value from string to datetime + """ + for k, v in d.items(): + if isinstance(v, basestring) and len(v) == 19: + try: + d[k] = datetime(*time.strptime(v, "%Y-%m-%dT%H:%M:%S")[:6]) + except ValueError: + pass + elif isinstance(v, list): + d[k] = [self.string_to_datetime(elem) for elem in v] + return DotDict(d) + + +class BaseBulker(object): + """ + Base class to implement a bulker strategy + + """ + + def __init__(self, conn, bulk_size=400, raise_on_bulk_item_failure=False): + self.conn = conn + self._bulk_size = bulk_size + # protects bulk_data + self.bulk_lock = threading.RLock() + with self.bulk_lock: + self.bulk_data = [] + self.raise_on_bulk_item_failure = raise_on_bulk_item_failure + + def get_bulk_size(self): + """ + Get the current bulk_size + + :return a int: the size of the bulk holder + """ + return 
self._bulk_size + + def set_bulk_size(self, bulk_size): + """ + Set the bulk size + + :param bulk_size the bulker size + """ + self._bulk_size = bulk_size + self.flush_bulk() + + bulk_size = property(get_bulk_size, set_bulk_size) + + def add(self, content): + raise NotImplementedError + + def flush_bulk(self, forced=False): + raise NotImplementedError + + +class ListBulker(BaseBulker): + """ + A bulker that store data in a list + """ + + def __init__(self, conn, bulk_size=400, raise_on_bulk_item_failure=False): + super(ListBulker, self).__init__(conn=conn, bulk_size=bulk_size, + raise_on_bulk_item_failure=raise_on_bulk_item_failure) + with self.bulk_lock: + self.bulk_data = [] + + def add(self, content): + with self.bulk_lock: + self.bulk_data.append(content) + + def flush_bulk(self, forced=False): + with self.bulk_lock: + if forced or len(self.bulk_data) >= self.bulk_size: + batch = self.bulk_data + self.bulk_data = [] + else: + return None + + if len(batch) > 0: + bulk_result = self.conn._send_request("POST", + "/_bulk", + "\n".join(batch) + "\n") + + if self.raise_on_bulk_item_failure: + _raise_exception_if_bulk_item_failed(bulk_result) + + return bulk_result + + +class ES(object): + """ + ES connection object. + """ + #static to easy overwrite + encoder = ESJsonEncoder + decoder = ESJsonDecoder + + def __init__(self, server="localhost:9200", timeout=30.0, bulk_size=400, + encoder=None, decoder=None, + max_retries=3, + default_indices=None, + default_types=None, + dump_curl=False, + model=ElasticSearchModel, + basic_auth=None, + raise_on_bulk_item_failure=False, + document_object_field=None, + bulker_class=ListBulker): + """ + Init a es object. + Servers can be defined in different forms: + + - host:port with protocol guess (i.e. 127.0.0.1:9200 protocol -> http + 127.0.0.1:9500 protocol -> thrift ) + - type://host:port (i.e. http://127.0.0.1:9200 https://127.0.0.1:9200 thrift://127.0.0.1:9500) + + - (type, host, port) (i.e. 
tuple ("http", "127.0.0.1", "9200") ("https", "127.0.0.1", "9200") + ("thrift", "127.0.0.1", "9500")). This is the prefered form. + + :param server: the server name, it can be a list of servers. + :param timeout: timeout for a call + :param bulk_size: size of bulk operation + :param encoder: tojson encoder + :param max_retries: number of max retries for server if a server is down + :param basic_auth: Dictionary with 'username' and 'password' keys for HTTP Basic Auth. + :param model: used to objectify the dictinary. If None, the raw dict is returned. + + + :param dump_curl: If truthy, this will dump every query to a curl file. If + this is set to a string value, it names the file that output is sent + to. Otherwise, it should be set to an object with a write() method, + which output will be written to. + + :param raise_on_bulk_item_failure: raises an exception if an item in a + bulk operation fails + + :param document_object_field: a class to use as base document field in mapper + """ + if default_indices is None: + default_indices = ["_all"] + self.timeout = timeout + self.default_indices = default_indices + self.max_retries = max_retries + self.cluster = None + self.debug_dump = False + self.cluster_name = "undefined" + self.basic_auth = basic_auth + self.connection = None + self._mappings = None + self.document_object_field = document_object_field + + if model is None: + model = lambda connection, model: model + self.model = model + if dump_curl: + if isinstance(dump_curl, basestring): + self.dump_curl = open(dump_curl, "wb") + elif hasattr(dump_curl, 'write'): + self.dump_curl = dump_curl + else: + raise TypeError("dump_curl parameter must be supplied with a " + "string or an object with a write() method") + else: + self.dump_curl = None + + #used in bulk + self._bulk_size = bulk_size #size of the bulk + self.bulker = bulker_class(self, bulk_size=bulk_size, raise_on_bulk_item_failure=raise_on_bulk_item_failure) + self.bulker_class = bulker_class + 
self._raise_on_bulk_item_failure = raise_on_bulk_item_failure + + self.info = {} #info about the current server + if encoder: + self.encoder = encoder + if decoder: + self.decoder = decoder + if isinstance(server, (str, unicode)): + self.servers = [server] + elif isinstance(server, tuple): + self.servers = [server] + else: + self.servers = server + + self.default_types = default_types or [] + #check the servers variable + self._check_servers() + #init connections + self._init_connection() + + def __del__(self): + """ + Destructor + """ + # Don't bother getting the lock + if self.bulker: + # It's not safe to rely on the destructor to flush the queue: + # the Python documentation explicitly states "It is not guaranteed + # that __del__() methods are called for objects that still exist " + # when the interpreter exits." + logger.error("pyes object %s is being destroyed, but bulk " + "operations have not been flushed. Call force_bulk()!", + self) + # Do our best to save the client anyway... + self.bulker.force_bulk() + + def _check_servers(self): + """Check the servers variable and convert in a valid tuple form""" + new_servers = [] + + def check_format(server): + if server.scheme not in ["thrift", "http", "https"]: + raise RuntimeError("Unable to recognize protocol: \"%s\"" % _type) + + if server.scheme == "thrift": + if not thrift_enable: + raise RuntimeError("If you want to use thrift, please install thrift. 
\"pip install thrift\"") + if server.port is None: + raise RuntimeError("If you want to use thrift, please provide a port number") + + new_servers.append(server) + + for server in self.servers: + if isinstance(server, (tuple, list)): + if len(list(server)) != 3: + raise RuntimeError("Invalid server definition: \"%s\"" % server) + _type, host, port = server + server = urlparse('%s://%s:%s' % (_type, host, port)) + check_format(server) + elif isinstance(server, basestring): + if server.startswith(("thrift:", "http:", "https:")): + server = urlparse(server) + check_format(server) + continue + else: + tokens = [t for t in server.split(":") if t.strip()] + if len(tokens) == 2: + host = tokens[0] + try: + port = int(tokens[1]) + except ValueError: + raise RuntimeError("Invalid port: \"%s\"" % port) + + if 9200 <= port <= 9299: + _type = "http" + elif 9500 <= port <= 9599: + _type = "thrift" + else: + raise RuntimeError("Unable to recognize port-type: \"%s\"" % port) + + server = urlparse('%s://%s:%s' % (_type, host, port)) + check_format(server) + + self.servers = new_servers + + def _init_connection(self): + """ + Create initial connection pool + """ + #detect connectiontype + if not self.servers: + raise RuntimeError("No server defined") + + server = random.choice(self.servers) + if server.scheme in ["http", "https"]: + self.connection = http_connect( + filter(lambda server: server.scheme in ["http", "https"], self.servers), + timeout=self.timeout + , + basic_auth=self.basic_auth + , + max_retries=self.max_retries) + return + elif server.scheme == "thrift": + self.connection = thrift_connect( + filter(lambda server: server.scheme == "thrift", self.servers), + timeout=self.timeout + , + max_retries=self.max_retries) + + def _discovery(self): + """ + Find other servers asking nodes to given server + """ + data = self.cluster_nodes() + self.cluster_name = data["cluster_name"] + for _, nodedata in data["nodes"].items(): + server = nodedata['http_address'].replace("]", 
"").replace("inet[", "http:/") + if server not in self.servers: + self.servers.append(server) + self._init_connection() + return self.servers + + def _get_bulk_size(self): + """ + Get the current bulk_size + + :return a int: the size of the bulk holder + """ + return self._bulk_size + + def _set_bulk_size(self, bulk_size): + """ + Set the bulk size + + :param bulk_size the bulker size + """ + self._bulk_size = bulk_size + self.bulker.bulk_size = bulk_size + + bulk_size = property(_get_bulk_size, _set_bulk_size) + + def _get_raise_on_bulk_item_failure(self): + """ + Get the raise_on_bulk_item_failure status + + :return a bool: the status of raise_on_bulk_item_failure + """ + return self._bulk_size + + def _set_raise_on_bulk_item_failure(self, raise_on_bulk_item_failure): + """ + Set the raise_on_bulk_item_failure parameter + + :param raise_on_bulk_item_failure a bool the status of the raise_on_bulk_item_failure + """ + self._raise_on_bulk_item_failure = raise_on_bulk_item_failure + self.bulker.raise_on_bulk_item_failure = raise_on_bulk_item_failure + + raise_on_bulk_item_failure = property(_get_raise_on_bulk_item_failure, _set_raise_on_bulk_item_failure) + + + def _send_request(self, method, path, body=None, params=None, headers=None, raw=False): + if params is None: + params = {} + if headers is None: + headers = {} + # prepare the request + if not path.startswith("/"): + path = "/" + path + if not self.connection: + self._init_connection() + if body: + if not isinstance(body, dict) and hasattr(body, "as_dict"): + body = body.as_dict() + + if isinstance(body, dict): + body = json.dumps(body, cls=self.encoder) + else: + body = "" + # Patched to make start parameter work as it's supposed to + # As per https://github.com/aparo/pyes/issues/153 + if 'start' in params: + params['from']=params['start'] + del params['start'] + request = RestRequest(method=Method._NAMES_TO_VALUES[method.upper()], + uri=path, parameters=params, headers=headers, body=body) + if self.dump_curl 
is not None: + self._dump_curl_request(request) + + # execute the request + response = self.connection.execute(request) + + if method == "HEAD": + if response.status != 200: + return False + return True + + # handle the response + try: + decoded = json.loads(response.body, cls=self.decoder) + except ValueError: + try: + decoded = json.loads(response.body, cls=ESJsonDecoder) + except ValueError: + # The only known place where we get back a body which can't be + # parsed as JSON is when no handler is found for a request URI. + # In this case, the body is actually a good message to return + # in the exception. + raise ElasticSearchException(response.body, response.status, response.body) + if response.status != 200: + raise_if_error(response.status, decoded) + if not raw and isinstance(decoded, dict): + decoded = DotDict(decoded) + return decoded + + def _make_path(self, path_components): + """ + Smush together the path components. Empty components will be ignored. + """ + path_components = [quote(str(component), "") for component in path_components if component] + path = '/'.join(path_components) + if not path.startswith('/'): + path = '/' + path + return path + + def _query_call(self, query_type, query, indices=None, doc_types=None, **query_params): + """ + This can be used for search and count calls. + These are identical api calls, except for the type of query. + """ + querystring_args = query_params + indices = self._validate_indices(indices) + if doc_types is None: + doc_types = self.default_types + if isinstance(doc_types, basestring): + doc_types = [doc_types] + body = query + path = self._make_path([','.join(indices), ','.join(doc_types), query_type]) + return self._send_request('GET', path, body, params=querystring_args) + + def _validate_indices(self, indices=None): + """Return a valid list of indices. + + `indices` may be a string or a list of strings. + If `indices` is not supplied, returns the default_indices. 
+ + """ + if indices is None: + indices = self.default_indices + if isinstance(indices, basestring): + indices = [indices] + return indices + + def _dump_curl_request(self, request): + print >> self.dump_curl, "# [%s]" % datetime.now().isoformat() + params = {'pretty': 'true'} + params.update(request.parameters) + method = Method._VALUES_TO_NAMES[request.method] + server = self.servers[0] + url = urlunsplit((server.scheme, server.netloc, request.uri, urlencode(params), '')) + curl_cmd = "curl -X%s '%s'" % (method, url) + if request.body: + curl_cmd += " -d '%s'" % request.body + print >> self.dump_curl, curl_cmd + + def _get_default_indices(self): + return self._default_indices + + def _set_default_indices(self, indices): + if indices is None: + raise ValueError("default_indices cannot be set to None") + self._default_indices = self._validate_indices(indices) + + default_indices = property(_get_default_indices, _set_default_indices) + del _get_default_indices, _set_default_indices + + @property + def mappings(self): + if self._mappings is None: + self._mappings = Mapper(self.get_mapping(indices=self.default_indices), + connection=self, + document_object_field=self.document_object_field) + return self._mappings + + #---- Admin commands + def status(self, indices=None): + """ + Retrieve the status of one or more indices + """ + indices = self._validate_indices(indices) + path = self._make_path([','.join(indices), '_status']) + return self._send_request('GET', path) + + def aliases(self, indices=None): + """ + Retrieve the aliases of one or more indices + """ + if indices is None: + indices = self.default_indices + path = self._make_path([','.join(indices), '_aliases']) + return self._send_request('GET', path) + + def create_bulker(self): + """ + Create a bulker object and return it to allow to manage custom bulk policies + """ + return self.bulker_class(self, bulk_size=self.bulk_size, + raise_on_bulk_item_failure=self.raise_on_bulk_item_failure) + + + def 
create_index(self, index, settings=None): + """ + Creates an index with optional settings. + Settings must be a dictionary which will be converted to JSON. + Elasticsearch also accepts yaml, but we are only passing JSON. + """ + return self._send_request('PUT', index, settings) + + def create_index_if_missing(self, index, settings=None): + """Creates an index if it doesn't already exist. + + If supplied, settings must be a dictionary. + + """ + try: + return self.create_index(index, settings) + except IndexAlreadyExistsException, e: + return e.result + + def delete_index(self, index): + """Deletes an index. + """ + return self._send_request('DELETE', index) + + def exists_index(self, index): + """ + Check if an index exists. + """ + return self._send_request('HEAD', index) + + def delete_index_if_exists(self, index): + """Deletes an index if it exists. + + """ + if self.exists_index(index): + return self.delete_index(index) + + def get_indices(self, include_aliases=False): + """Get a dict holding an entry for each index which exists. + + If include_alises is True, the dict will also contain entries for + aliases. + + The key for each entry in the dict is the index or alias name. The + value is a dict holding the following properties: + + - num_docs: Number of documents in the index or alias. + - alias_for: Only present for an alias: holds a list of indices which + this is an alias for. 
+ + """ + state = self.cluster_state() + status = self.status() + result = {} + indices_status = status['indices'] + indices_metadata = state['metadata']['indices'] + for index in sorted(indices_status.keys()): + info = indices_status[index] + metadata = indices_metadata[index] + num_docs = info['docs']['num_docs'] + result[index] = dict(num_docs=num_docs) + if not include_aliases: + continue + for alias in metadata.get('aliases', []): + try: + alias_obj = result[alias] + except KeyError: + alias_obj = {} + result[alias] = alias_obj + alias_obj['num_docs'] = alias_obj.get('num_docs', 0) + num_docs + try: + alias_obj['alias_for'].append(index) + except KeyError: + alias_obj['alias_for'] = [index] + return result + + def get_closed_indices(self): + """ + Get all closed indices. + """ + state = self.cluster_state() + status = self.status() + + indices_metadata = set(state['metadata']['indices'].keys()) + indices_status = set(status['indices'].keys()) + + return indices_metadata.difference(indices_status) + + def get_alias(self, alias): + """Get the index or indices pointed to by a given alias. + + Raises IndexMissingException if the alias does not exist. + + Otherwise, returns a list of index names. + + """ + status = self.status([alias]) + return status['indices'].keys() + + def change_aliases(self, commands): + """Change the aliases stored. + + `commands` is a list of 3-tuples; (command, index, alias), where + `command` is one of "add" or "remove", and `index` and `alias` are the + index and alias to add or remove. + + """ + body = { + 'actions': [ + {command: dict(index=index, alias=alias)} + for (command, index, alias) in commands + ] + } + return self._send_request('POST', "_aliases", body) + + def add_alias(self, alias, indices=None): + """Add an alias to point to a set of indices. 
+ + """ + indices = self._validate_indices(indices) + return self.change_aliases(['add', index, alias] + for index in indices) + + def delete_alias(self, alias, indices=None): + """Delete an alias. + + The specified index or indices are deleted from the alias, if they are + in it to start with. This won't report an error even if the indices + aren't present in the alias. + + """ + indices = self._validate_indices(indices) + return self.change_aliases(['remove', index, alias] + for index in indices) + + def set_alias(self, alias, indices=None): + """Set an alias. + + This handles removing the old list of indices pointed to by the alias. + + Warning: there is a race condition in the implementation of this + function - if another client modifies the indices which this alias + points to during this call, the old value of the alias may not be + correctly set. + + """ + indices = self._validate_indices(indices) + try: + old_indices = self.get_alias(alias) + except IndexMissingException: + old_indices = [] + commands = [['remove', index, alias] for index in old_indices] + commands.extend([['add', index, alias] for index in indices]) + if len(commands) > 0: + return self.change_aliases(commands) + + def close_index(self, index): + """ + Close an index. + """ + return self._send_request('POST', "/%s/_close" % index) + + def open_index(self, index): + """ + Open an index. 
+ """ + return self._send_request('POST', "/%s/_open" % index) + + def flush(self, indices=None, refresh=None): + """ + Flushes one or more indices (clear memory) + """ + self.force_bulk() + + indices = self._validate_indices(indices) + + path = self._make_path([','.join(indices), '_flush']) + args = {} + if refresh is not None: + args['refresh'] = refresh + return self._send_request('POST', path, params=args) + + def refresh(self, indices=None, timesleep=None): + """ + Refresh one or more indices + + timesleep: seconds to wait + """ + self.force_bulk() + indices = self._validate_indices(indices) + + path = self._make_path([','.join(indices), '_refresh']) + result = self._send_request('POST', path) + if timesleep: + time.sleep(timesleep) + self.cluster_health(wait_for_status='green') + return result + + + def optimize(self, indices=None, + wait_for_merge=False, + max_num_segments=None, + only_expunge_deletes=False, + refresh=True, + flush=True): + """Optimize one or more indices. + + `indices` is the list of indices to optimise. If not supplied, all + default_indices are optimised. + + `wait_for_merge` (boolean): If True, the operation will not return + until the merge has been completed. Defaults to False. + + `max_num_segments` (integer): The number of segments to optimize to. To + fully optimize the index, set it to 1. Defaults to half the number + configured by the merge policy (which in turn defaults to 10). + + `only_expunge_deletes` (boolean): Should the optimize process only + expunge segments with deletes in it. In Lucene, a document is not + deleted from a segment, just marked as deleted. During a merge process + of segments, a new segment is created that does have those deletes. + This flag allow to only merge segments that have deletes. Defaults to + false. + + `refresh` (boolean): Should a refresh be performed after the optimize. + Defaults to true. + + `flush` (boolean): Should a flush be performed after the optimize. + Defaults to true. 
+ + """ + indices = self._validate_indices(indices) + path = self._make_path([','.join(indices), '_optimize']) + params = dict( + wait_for_merge=wait_for_merge, + only_expunge_deletes=only_expunge_deletes, + refresh=refresh, + flush=flush, + ) + if max_num_segments is not None: + params['max_num_segments'] = max_num_segments + result = self._send_request('POST', path, params=params) + return result + + def analyze(self, text, index=None, analyzer=None, tokenizer=None, filters=None, field=None): + """ + Performs the analysis process on a text and return the tokens breakdown of the text + """ + if filters is None: + filters = [] + argsets = 0 + args = {} + + if analyzer: + args['analyzer'] = analyzer + argsets += 1 + if tokenizer or filters: + if tokenizer: + args['tokenizer'] = tokenizer + if filters: + args['filters'] = ','.join(filters) + argsets += 1 + if field: + args['field'] = field + argsets += 1 + + if argsets > 1: + raise ValueError('Argument conflict: Speficy either analyzer, tokenizer/filters or field') + + if field and index is None: + raise ValueError('field can only be specified with an index') + + path = self._make_path([index, '_analyze']) + return self._send_request('POST', path, text, args) + + def gateway_snapshot(self, indices=None): + """ + Gateway snapshot one or more indices + + :param indices: a list of indices or None for default configured. + """ + indices = self._validate_indices(indices) + path = self._make_path([','.join(indices), '_gateway', 'snapshot']) + return self._send_request('POST', path) + + def put_mapping(self, doc_type=None, mapping=None, indices=None): + """ + Register specific mapping definition for a specific type against one or more indices. 
+ """ + indices = self._validate_indices(indices) + if mapping is None: + mapping = {} + if hasattr(mapping, "to_json"): + mapping = mapping.to_json() + if hasattr(mapping, "as_dict"): + mapping = mapping.as_dict() + + if doc_type: + path = self._make_path([','.join(indices), doc_type, "_mapping"]) + if doc_type not in mapping: + mapping = {doc_type: mapping} + else: + path = self._make_path([','.join(indices), "_mapping"]) + + return self._send_request('PUT', path, mapping) + + def get_mapping(self, doc_type=None, indices=None): + """ + Register specific mapping definition for a specific type against one or more indices. + """ + indices = self._validate_indices(indices) + if doc_type: + path = self._make_path([','.join(indices), doc_type, "_mapping"]) + else: + path = self._make_path([','.join(indices), "_mapping"]) + return self._send_request('GET', path) + + def collect_info(self): + """ + Collect info about the connection and fill the info dictionary. + """ + try: + info = {} + res = self._send_request('GET', "/") + info['server'] = {} + info['server']['name'] = res['name'] + info['server']['version'] = res['version'] + info['allinfo'] = res + info['status'] = self.status() + info['aliases'] = self.aliases() + self.info = info + return True + except: + self.info = {} + return False + + #--- cluster + def cluster_health(self, indices=None, level="cluster", wait_for_status=None, + wait_for_relocating_shards=None, timeout=30): + """ + Check the current :ref:`cluster health <es-guide-reference-api-admin-cluster-health>`. + Request Parameters + + The cluster health API accepts the following request parameters: + + :param level: Can be one of cluster, indices or shards. Controls the + details level of the health information returned. + Defaults to *cluster*. + :param wait_for_status: One of green, yellow or red. Will wait (until + the timeout provided) until the status of the + cluster changes to the one provided. + By default, will not wait for any status. 
+ :param wait_for_relocating_shards: A number controlling to how many + relocating shards to wait for. + Usually will be 0 to indicate to + wait till all relocation have + happened. Defaults to not to wait. + :param timeout: A time based parameter controlling how long to wait + if one of the wait_for_XXX are provided. + Defaults to 30s. + """ + path = self._make_path(["_cluster", "health"]) + mapping = {} + if level != "cluster": + if level not in ["cluster", "indices", "shards"]: + raise ValueError("Invalid level: %s" % level) + mapping['level'] = level + if wait_for_status: + if wait_for_status not in ["green", "yellow", "red"]: + raise ValueError("Invalid wait_for_status: %s" % wait_for_status) + mapping['wait_for_status'] = wait_for_status + + mapping['timeout'] = "%ds" % timeout + return self._send_request('GET', path, mapping) + + def cluster_state(self, filter_nodes=None, filter_routing_table=None, + filter_metadata=None, filter_blocks=None, + filter_indices=None): + """ + Retrieve the :ref:`cluster state <es-guide-reference-api-admin-cluster-state>`. + + :param filter_nodes: set to **true** to filter out the **nodes** part + of the response. + :param filter_routing_table: set to **true** to filter out the + **routing_table** part of the response. + :param filter_metadata: set to **true** to filter out the **metadata** + part of the response. + :param filter_blocks: set to **true** to filter out the **blocks** + part of the response. + :param filter_indices: when not filtering metadata, a comma separated + list of indices to include in the response. 
+ + """ + path = self._make_path(["_cluster", "state"]) + parameters = {} + + if filter_nodes is not None: + parameters['filter_nodes'] = filter_nodes + + if filter_routing_table is not None: + parameters['filter_routing_table'] = filter_routing_table + + if filter_metadata is not None: + parameters['filter_metadata'] = filter_metadata + + if filter_blocks is not None: + parameters['filter_blocks'] = filter_blocks + + if filter_blocks is not None: + if isinstance(filter_indices, basestring): + parameters['filter_indices'] = filter_indices + else: + parameters['filter_indices'] = ",".join(filter_indices) + + return self._send_request('GET', path, params=parameters) + + def cluster_nodes(self, nodes=None): + """ + The cluster :ref:`nodes info <es-guide-reference-api-admin-cluster-state>` API allows to retrieve one or more (or all) of + the cluster nodes information. + """ + parts = ["_cluster", "nodes"] + if nodes: + parts.append(",".join(nodes)) + path = self._make_path(parts) + return self._send_request('GET', path) + + def cluster_stats(self, nodes=None): + """ + The cluster :ref:`nodes info <es-guide-reference-api-admin-cluster-nodes-stats>` API allows to retrieve one or more (or all) of + the cluster nodes information. + """ + parts = ["_cluster", "nodes", "stats"] + if nodes: + parts = ["_cluster", "nodes", ",".join(nodes), "stats"] + + path = self._make_path(parts) + return self._send_request('GET', path) + + + def index_raw_bulk(self, header, document): + """ + Function helper for fast inserting + + header and document must be string "\n" ended + """ + self.bulker.add(u"%s%s" % (header, document)) + return self.flush_bulk() + + def index(self, doc, index, doc_type, id=None, parent=None, + force_insert=False, + op_type=None, + bulk=False, version=None, querystring_args=None): + """ + Index a typed JSON document into a specific index and make it searchable. 
+ """ + if querystring_args is None: + querystring_args = {} + + if bulk: + if op_type is None: + op_type = "index" + if force_insert: + op_type = "create" + cmd = {op_type: {"_index": index, "_type": doc_type}} + if parent: + cmd[op_type]['_parent'] = parent + if version: + cmd[op_type]['_version'] = version + if 'routing' in querystring_args: + cmd[op_type]['_routing'] = querystring_args['routing'] + if 'percolate' in querystring_args: + cmd[op_type]['percolate'] = querystring_args['percolate'] + if id: + cmd[op_type]['_id'] = id + + if isinstance(doc, dict): + doc = json.dumps(doc, cls=self.encoder) + command = "%s\n%s" % (json.dumps(cmd, cls=self.encoder), doc) + self.bulker.add(command) + return self.flush_bulk() + + if force_insert: + querystring_args['op_type'] = 'create' + if op_type: + querystring_args['op_type'] = op_type + + if parent: + if not isinstance(parent, basestring): + parent = str(parent) + querystring_args['parent'] = parent + + if version: + if not isinstance(version, basestring): + version = str(version) + querystring_args['version'] = version + + if id is None: + request_method = 'POST' + else: + request_method = 'PUT' + + path = self._make_path([index, doc_type, id]) + return self._send_request(request_method, path, doc, querystring_args) + + + def index_stats(self, indices=None): + """ + http://www.elasticsearch.org/guide/reference/api/admin-indices-stats.html + """ + parts = ["_stats"] + if indices: + if isinstance(indices, basestring): + indices = [indices] + parts = [",".join(indices), "_stats"] + + path = self._make_path(parts) + return self._send_request('GET', path) + + + def flush_bulk(self, forced=False): + """ + Send pending operations if forced or if the bulk threshold is exceeded. + """ + return self.bulker.flush_bulk(forced) + + def force_bulk(self): + """ + Force executing of all bulk data. 
+ + Return the bulk response + """ + return self.flush_bulk(True) + + def put_file(self, filename, index, doc_type, id=None, name=None): + """ + Store a file in a index + """ + querystring_args = {} + + if id is None: + request_method = 'POST' + else: + request_method = 'PUT' + path = self._make_path([index, doc_type, id]) + doc = file_to_attachment(filename) + if name: + doc["_name"] = name + return self._send_request(request_method, path, doc, querystring_args) + + def get_file(self, index, doc_type, id=None): + """ + Return the filename and memory data stream + """ + data = self.get(index, doc_type, id) + return data['_name'], base64.standard_b64decode(data['content']) + #return data["_source"]['_name'], base64.standard_b64decode(data["_source"]['content']) + + def update(self, extra_doc, index, doc_type, id, querystring_args=None, + update_func=None, attempts=2): + """ + Update an already indexed typed JSON document. + + The update happens client-side, i.e. the current document is retrieved, + updated locally and finally pushed to the server. This may repeat up to + ``attempts`` times in case of version conflicts. + + :param update_func: A callable ``update_func(current_doc, extra_doc)`` + that computes and returns the updated doc. Alternatively it may + update ``current_doc`` in place and return None. The default + ``update_func`` is ``dict.update``. + + :param attempts: How many times to retry in case of version conflict. 
+ """ + if querystring_args is None: + querystring_args = {} + + if update_func is None: + update_func = dict.update + + for attempt in xrange(attempts - 1, -1, -1): + current_doc = self.get(index, doc_type, id, **querystring_args) + new_doc = update_func(current_doc, extra_doc) + if new_doc is None: + new_doc = current_doc + try: + return self.index(new_doc, index, doc_type, id, + version=current_doc._meta.version, querystring_args=querystring_args) + except VersionConflictEngineException: + if attempt <= 0: + raise + self.refresh(index) + + def delete(self, index, doc_type, id, bulk=False, querystring_args=None): + """ + Delete a typed JSON document from a specific index based on its id. + If bulk is True, the delete operation is put in bulk mode. + """ + querystring_args = querystring_args or {} + if bulk: + cmd = {"delete": {"_index": index, "_type": doc_type, + "_id": id}} + self.bulker.add(json.dumps(cmd, cls=self.encoder)) + return self.flush_bulk() + + path = self._make_path([index, doc_type, id]) + return self._send_request('DELETE', path, params=querystring_args) + + def delete_by_query(self, indices, doc_types, query, **request_params): + """ + Delete documents from one or more indices and one or more types based on a query. + """ + querystring_args = request_params + indices = self._validate_indices(indices) + if doc_types is None: + doc_types = [] + if isinstance(doc_types, basestring): + doc_types = [doc_types] + + if hasattr(query, 'to_query_json'): + # Then is a Query object. + body = query.to_query_json() + elif isinstance(query, dict): + # A direct set of search parameters. 
+ body = json.dumps(query, cls=ES.encoder) + else: + raise InvalidQuery("delete_by_query() must be supplied with a Query object, or a dict") + + path = self._make_path([','.join(indices), ','.join(doc_types), '_query']) + return self._send_request('DELETE', path, body, querystring_args) + + def delete_mapping(self, index, doc_type): + """ + Delete a typed JSON document type from a specific index. + """ + path = self._make_path([index, doc_type]) + return self._send_request('DELETE', path) + + def get(self, index, doc_type, id, fields=None, routing=None, **get_params): + """ + Get a typed JSON document from an index based on its id. + """ + path = self._make_path([index, doc_type, id]) + if fields is not None: + get_params["fields"] = ",".join(fields) + if routing: + get_params["routing"] = routing + return self.model(self, self._send_request('GET', path, params=get_params)) + + + def factory_object(self, index, doc_type, data=None, id=None, vertex=False): + """ + Create a stub object to be manipulated + """ + data = data or {} + obj = ElasticSearchModel() + obj._meta.index = index + obj._meta.type = doc_type + obj._meta.connection = self + if id: + obj._meta.id = id + if data: + obj.update(data) + if vertex: + obj.force_vertex() + return obj + + def mget(self, ids, index=None, doc_type=None, routing=None, **get_params): + """ + Get multi JSON documents. 
+ + ids can be: + list of tuple: (index, type, id) + list of ids: index and doc_type are required + + """ + if not ids: + return [] + + body = [] + for value in ids: + if isinstance(value, tuple): + if len(value) == 3: + a, b, c = value + body.append({"_index": a, + "_type": b, + "_id": c}) + elif len(value) == 4: + a, b, c, d = value + body.append({"_index": a, + "_type": b, + "_id": c, + "fields": d}) + + else: + if index is None: + raise InvalidQuery("index value is required for id") + if doc_type is None: + raise InvalidQuery("doc_type value is required for id") + body.append({"_index": index, + "_type": doc_type, + "_id": value}) + + if routing: + get_params["routing"] = routing + results = self._send_request('GET', "/_mget", + body={'docs': body}, + params=get_params) + if 'docs' in results: + model = self.model + return [model(self, item) for item in results['docs']] + return [] + + def search_raw(self, query, indices=None, doc_types=None, **query_params): + """Execute a search against one or more indices to get the search hits. + + `query` must be a Search object, a Query object, or a custom + dictionary of search parameters using the query DSL to be passed + directly. + + """ + indices = self._validate_indices(indices) + if doc_types is None: + doc_types = [] + elif isinstance(doc_types, basestring): + doc_types = [doc_types] + + if hasattr(query, 'to_search_json'): + # Common case - a Search or Query object. + query.encoder = self.encoder + body = query.to_search_json() + elif isinstance(query, dict): + # A direct set of search parameters. + body = json.dumps(query, cls=self.encoder) + else: + raise InvalidQuery("search() must be supplied with a Search or Query object, or a dict") + + return self._query_call("_search", body, indices, doc_types, **query_params) + + def search(self, query, indices=None, doc_types=None, **query_params): + """Execute a search against one or more indices to get the resultset. 
+ + `query` must be a Search object, a Query object, or a custom + dictionary of search parameters using the query DSL to be passed + directly. + + """ + indices = self._validate_indices(indices) + if doc_types is None: + doc_types = [] + elif isinstance(doc_types, basestring): + doc_types = [doc_types] + if hasattr(query, 'search'): + query = query.search() + + if hasattr(query, 'to_search_json') or isinstance(query, dict): + pass + else: + raise InvalidQuery("search() must be supplied with a Search or Query object, or a dict") + return ResultSet(connection=self, query=query, indices=indices, doc_types=doc_types, query_params=query_params) + + # scan method is no longer working due to change in ES.search behavior. May no longer warrant its own method. + # def scan(self, query, indices=None, doc_types=None, scroll_timeout="10m", **query_params): + # """Return a generator which will scan against one or more indices and iterate over the search hits. (currently support only by ES Master) + # + # `query` must be a Search object, a Query object, or a custom + # dictionary of search parameters using the query DSL to be passed + # directly. + # + # """ + # results = self.search(query=query, indices=indices, doc_types=doc_types, search_type="scan", scroll=scroll_timeout, **query_params) + # while True: + # scroll_id = results["_scroll_id"] + # results = self._send_request('GET', "_search/scroll", scroll_id, {"scroll":scroll_timeout}) + # total = len(results["hits"]["hits"]) + # if not total: + # break + # yield results + + def search_scroll(self, scroll_id, scroll_timeout="10m"): + """ + Executes a scrolling given an scroll_id + """ + return self._send_request('GET', "_search/scroll", scroll_id, {"scroll": scroll_timeout}) + + def reindex(self, query, indices=None, doc_types=None, **query_params): + """ + Execute a search query against one or more indices and and reindex the hits. + query must be a dictionary or a Query object that will convert to Query DSL. 
+ Note: reindex is only available in my ElasticSearch branch on github. + """ + indices = self._validate_indices(indices) + if doc_types is None: + doc_types = [] + if isinstance(doc_types, basestring): + doc_types = [doc_types] + if not isinstance(query, basestring): + if isinstance(query, dict): + if 'query' in query: + query = query['query'] + query = json.dumps(query, cls=self.encoder) + elif hasattr(query, "to_query_json"): + query = query.to_query_json(inner=True) + querystring_args = query_params + indices = self._validate_indices(indices) + body = query + path = self._make_path([','.join(indices), ','.join(doc_types), "_reindexbyquery"]) + return self._send_request('POST', path, body, querystring_args) + + def count(self, query=None, indices=None, doc_types=None, **query_params): + """ + Execute a query against one or more indices and get hits count. + """ + indices = self._validate_indices(indices) + if doc_types is None: + doc_types = [] + if query is None: + from .query import MatchAllQuery + + query = MatchAllQuery() + if hasattr(query, 'to_query_json'): + query = query.to_query_json() + if hasattr(query, 'to_json'): + query = query.to_json() + return self._query_call("_count", query, indices, doc_types, **query_params) + + #--- river management + def create_river(self, river, river_name=None): + """ + Create a river + """ + if hasattr(river, "q"): + river_name = river.name + river = river.q + return self._send_request('PUT', '/_river/%s/_meta' % river_name, river) + + def delete_river(self, river, river_name=None): + """ + Delete a river + """ + if hasattr(river, "q"): + river_name = river.name + return self._send_request('DELETE', '/_river/%s/' % river_name) + + #--- settings management + + def get_settings(self, index=None): + """ + Returns the current settings for an index. 
+ """ + path = self._make_path([index, "_settings"]) + return self._send_request('GET', path) + + def update_settings(self, index, newvalues): + """ + Update Settings of an index. + + """ + path = self._make_path([index, "_settings"]) + return self._send_request('PUT', path, newvalues) + + # def terms(self, fields, indices=None, **query_params): + # """ + # Extract terms and their document frequencies from one or more fields. + # The fields argument must be a list or tuple of fields. + # For valid query params see: + # http://www.elasticsearch.com/docs/elasticsearch/rest_api/terms/ + # """ + # indices = self._validate_indices(indices) + # path = self._make_path([','.join(indices), "_terms"]) + # query_params['fields'] = ','.join(fields) + # return self._send_request('GET', path, params=query_params) + # + def morelikethis(self, index, doc_type, id, fields, **query_params): + """ + Execute a "more like this" search query against one or more fields and get back search hits. + """ + path = self._make_path([index, doc_type, id, '_mlt']) + query_params['fields'] = ','.join(fields) + body = query_params["body"] if query_params.has_key("body") else None + return self._send_request('GET', path, body=body, params=query_params) + + def create_percolator(self, index, name, query, **kwargs): + """ + Create a percolator document + + Any kwargs will be added to the document as extra properties. + + """ + path = self._make_path(['_percolator', index, name]) + + if hasattr(query, 'serialize'): + query = {'query': query.serialize()} + + if not isinstance(query, dict): + raise InvalidQuery("create_percolator() must be supplied with a Query object or dict") + # A direct set of search parameters. 
+ query.update(kwargs) + body = json.dumps(query, cls=self.encoder) + + return self._send_request('PUT', path, body=body) + + def delete_percolator(self, index, name): + """ + Delete a percolator document + """ + return self.delete('_percolator', index, name) + + def percolate(self, index, doc_types, query): + """ + Match a query with a document + """ + + if doc_types is None: + raise RuntimeError('percolate() must be supplied with at least one doc_type') + elif not isinstance(doc_types, list): + doc_types = [doc_types] + + path = self._make_path([index, ','.join(doc_types), '_percolate']) + + body = None + + if hasattr(query, 'to_query_json'): + # Then is a Query object. + body = query.to_query_json() + elif isinstance(query, dict): + # A direct set of search parameters. + body = json.dumps(query, cls=self.encoder) + else: + raise InvalidQuery("percolate() must be supplied with a Query object, or a dict") + + return self._send_request('GET', path, body=body) + + +def decode_json(data): + """ Decode some json to dict""" + return json.loads(data, cls=ES.decoder) + + +def encode_json(data): + """ Encode some json to dict""" + return json.dumps(data, cls=ES.encoder) + + +class ResultSet(object): + def __init__(self, connection, query, indices=None, doc_types=None, query_params=None, + auto_fix_keys=False, auto_clean_highlight=False): + """ + results: an es query results dict + fix_keys: remove the "_" from every key, useful for django views + clean_highlight: removed empty highlight + query can be a dict or a Search object. 
+ """ + self.connection = connection + self.indices = indices + self.doc_types = doc_types + self.query_params = query_params or {} + self.scroller_parameters = {} + self.scroller_id = None + self._results = None + self._total = None + self.valid = False + self._facets = {} + self.auto_fix_keys = auto_fix_keys + self.auto_clean_highlight = auto_clean_highlight + + from .query import Search, Query + + if not isinstance(query, (Query, Search, dict)): + raise InvalidQuery("search() must be supplied with a Search or Query object, or a dict") + + if not isinstance(query, Search): + self.query = Search(query) + else: + self.query = query + + self.iterpos = 0 #keep track of iterator position + self.start = self.query.start or query_params.get("start", 0) + self._max_item = self.query.size + self._current_item = 0 + self.chuck_size = self.query.bulk_read or self.query.size or 10 + + def _do_search(self, auto_increment=False): + self.iterpos = 0 + process_post_query = True #used to skip results in first scan + if self.scroller_id is None: + if auto_increment: + self.start += self.chuck_size + + self.query.start = self.start + self.query.size = self.chuck_size + + self._results = self.connection.search_raw(self.query, indices=self.indices, + doc_types=self.doc_types, **self.query_params) + if 'search_type' in self.query_params and self.query_params['search_type'] == "scan": + self.scroller_parameters['search_type'] = self.query_params['search_type'] + del self.query_params['search_type'] + if 'scroll' in self.query_params: + self.scroller_parameters['scroll'] = self.query_params['scroll'] + del self.query_params['scroll'] + if 'size' in self.query_params: + self.scroller_parameters['size'] = self.query_params['size'] + del self.query_params['size'] + self.chuck_size = self.scroller_parameters['size'] + if '_scroll_id' in self._results: + #scan query, let's load the first bulk of data + self.scroller_id = self._results['_scroll_id'] + self._do_search() + process_post_query = 
False + else: + try: + self._results = self.connection.search_scroll(self.scroller_id, + self.scroller_parameters.get("scroll", "10m")) + self.scroller_id = self._results['_scroll_id'] + except ReduceSearchPhaseException: + #bad hack, should be not hits on the last iteration + self._results['hits']['hits'] = [] + + if process_post_query: + self._facets = self._results.get('facets', {}) + if 'hits' in self._results: + self.valid = True + self.hits = self._results['hits']['hits'] + else: + self.hits = [] + if self.auto_fix_keys: + self._fix_keys() + if self.auto_clean_highlight: + self.clean_highlight() + + @property + def total(self): + if self._results is None: + self._do_search() + if self._total is None: + self._total = 0 + if self.valid: + self._total = self._results.get("hits", {}).get('total', 0) + return self._total + + @property + def facets(self): + if self._results is None: + self._do_search() + return self._facets + + def __len__(self): + return self.total + + def count(self): + return self.total + + def fix_keys(self): + """ + Remove the _ from the keys of the results + """ + if not self.valid: + return + + for hit in self._results['hits']['hits']: + for key, item in hit.items(): + if key.startswith("_"): + hit[key[1:]] = item + del hit[key] + + def clean_highlight(self): + """ + Remove the empty highlight + """ + if not self.valid: + return + + for hit in self._results['hits']['hits']: + if 'highlight' in hit: + hl = hit['highlight'] + for key, item in hl.items(): + if not item: + del hl[key] + + def __getattr__(self, name): + if self._results is None: + self._do_search() + if name == "facets": + return self._facets + return self._results['hits'][name] + + def __getitem__(self, val): + if not isinstance(val, (int, long, slice)): + raise TypeError('%s indices must be integers, not %s' % ( + self.__class__.__name__, val.__class__.__name__)) + + def get_start_end(val): + if isinstance(val, slice): + start = val.start + if not start: + start = 0 + else: + 
start -= 1 + end = val.stop or self.total + if end < 0: + end = self.total + end + if self._max_item is not None and end > self._max_item: + end = self._max_item + return start, end + return val, val + 1 + + start, end = get_start_end(val) + model = self.connection.model + + if self._results: + if start >= self.start and end < self.start + self.chuck_size: + if not isinstance(val, slice): + return model(self.connection, self._results['hits']['hits'][val - self.start]) + else: + return [model(self.connection, hit) for hit in self._results['hits']['hits'][start:end]] + + query = self.query.serialize() + query['from'] = start + query['size'] = end - start + + results = self.connection.search_raw(query, indices=self.indices, + doc_types=self.doc_types, **self.query_params) + + hits = results['hits']['hits'] + if not isinstance(val, slice): + if len(hits) == 1: + return model(self.connection, hits[0]) + raise IndexError + return [model(self.connection, hit) for hit in hits] + + def next(self): + if self._max_item is not None and self._current_item == self._max_item: + raise StopIteration + if self._results is None: + self._do_search() + if "_scroll_id" in self._results and self._total != 0 and self._current_item == 0 and len( + self._results["hits"].get("hits", [])) == 0: + self._do_search() + if len(self.hits) == 0: + raise StopIteration + if self.iterpos < len(self.hits): + res = self.hits[self.iterpos] + self.iterpos += 1 + self._current_item += 1 + return self.connection.model(self.connection, res) + + if self.iterpos == self.total: + raise StopIteration + self._do_search(auto_increment=True) + self.iterpos = 0 + if len(self.hits) == 0: + raise StopIteration + res = self.hits[self.iterpos] + self.iterpos += 1 + self._current_item += 1 + return self.connection.model(self.connection, res) + + def __iter__(self): + self.iterpos = 0 + if self._current_item != 0: + self._results = None + self._current_item = 0 + + self.start = 0 + return self diff --git 
a/src/archivematicaCommon/lib/externals/pyes/exceptions.py b/src/archivematicaCommon/lib/externals/pyes/exceptions.py new file mode 100644 index 0000000000..3d40a3e3c0 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/exceptions.py @@ -0,0 +1,137 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import + +from .utils import EqualityComparableUsingAttributeDictionary + +__author__ = 'Alberto Paro' + +__all__ = ['NoServerAvailable', + "QueryError", + "NotFoundException", + "AlreadyExistsException", + "IndexAlreadyExistsException", + "IndexMissingException", + "SearchPhaseExecutionException", + "InvalidQuery", + "InvalidParameterQuery", + "InvalidParameter", + "QueryParameterError", + "ScriptFieldsError", + "ReplicationShardOperationFailedException", + "ClusterBlockException", + "MapperParsingException", + "ElasticSearchException", + 'ReduceSearchPhaseException', + "VersionConflictEngineException", + 'DocumentAlreadyExistsEngineException', + "DocumentAlreadyExistsException", + "TypeMissingException", + "BulkOperationException" +] + +class NoServerAvailable(Exception): + pass + + +class InvalidQuery(Exception): + pass + + +class InvalidParameterQuery(InvalidQuery): + pass + + +class QueryError(Exception): + pass + + +class QueryParameterError(Exception): + pass + + +class ScriptFieldsError(Exception): + pass + + +class InvalidParameter(Exception): + pass + + +class ElasticSearchException(Exception): + """Base class of exceptions raised as a result of parsing an error return + from ElasticSearch. + + An exception of this class will be raised if no more specific subclass is + appropriate. 
+ + """ + + def __init__(self, error, status=None, result=None, request=None): + super(ElasticSearchException, self).__init__(error) + self.status = status + self.result = result + self.request = request + + +class ElasticSearchIllegalArgumentException(ElasticSearchException): + pass + + +class IndexMissingException(ElasticSearchException): + pass + + +class NotFoundException(ElasticSearchException): + pass + + +class AlreadyExistsException(ElasticSearchException): + pass + + +class IndexAlreadyExistsException(AlreadyExistsException): + pass + + +class SearchPhaseExecutionException(ElasticSearchException): + pass + + +class ReplicationShardOperationFailedException(ElasticSearchException): + pass + + +class ClusterBlockException(ElasticSearchException): + pass + + +class MapperParsingException(ElasticSearchException): + pass + + +class ReduceSearchPhaseException(ElasticSearchException): + pass + + +class VersionConflictEngineException(ElasticSearchException): + pass + + +class DocumentAlreadyExistsEngineException(ElasticSearchException): + pass + + +class DocumentAlreadyExistsException(ElasticSearchException): + pass + + +class TypeMissingException(ElasticSearchException): + pass + + +class BulkOperationException(ElasticSearchException, EqualityComparableUsingAttributeDictionary): + def __init__(self, errors, bulk_result): + super(BulkOperationException, self).__init__( + u"At least one operation in the bulk request has failed: %s" % errors) + self.errors = errors + self.bulk_result = bulk_result diff --git a/src/archivematicaCommon/lib/externals/pyes/facets.py b/src/archivematicaCommon/lib/externals/pyes/facets.py new file mode 100644 index 0000000000..2a0de071c4 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/facets.py @@ -0,0 +1,396 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import +from .utils import EqualityComparableUsingAttributeDictionary +from .filters import Filter, TermFilter, TermsFilter, ANDFilter, NotFilter + + +#--- 
Facet +class FacetFactory(EqualityComparableUsingAttributeDictionary): + def __init__(self): + self.facets = [] + + def add_term_facet(self, *args, **kwargs): + """Add a term factory facet""" + self.facets.append(TermFacet(*args, **kwargs)) + + def add_date_facet(self, *args, **kwargs): + """Add a date factory facet""" + self.facets.append(DateHistogramFacet(*args, **kwargs)) + + def add_geo_facet(self, *args, **kwargs): + """Add a geo factory facet""" + self.facets.append(GeoDistanceFacet(*args, **kwargs)) + + def add(self, facet): + """Add a term factory""" + self.facets.append(facet) + + @property + def q(self): + res = {} + for facet in self.facets: + res.update(facet.serialize()) + return {"facets": res} + + +class Facet(EqualityComparableUsingAttributeDictionary): + def __init__(self, scope=None, nested=None, + is_global=None, facet_filter=None, *args, **kwargs): + self.scope = scope + self.nested = nested + self.is_global = is_global + self.facet_filter = facet_filter + + def _base_parameters(self): + data = {} + if self.scope is not None: + data["scope"] = self.scope + if self.nested is not None: + data["nested"] = self.nested + if self.is_global: + data['global'] = self.is_global + if self.facet_filter: + data.update(self.facet_filter.q) + + return data + + +class QueryFacet(Facet): + _internal_name = "query" + + def __init__(self, name, query, **kwargs): + super(QueryFacet, self).__init__(**kwargs) + self.name = name + self.query = query + + def serialize(self): + data = self._base_parameters() + data[self._internal_name]= self.query.serialize() + return {self.name: data} + + +class FilterFacet(Facet): + _internal_name = "filter" + + def __init__(self, name, query, **kwargs): + super(FilterFacet, self).__init__(**kwargs) + self.name = name + self.query = query + + def serialize(self): + data = self._base_parameters() + data[self._internal_name]= self.query.serialize() + return {self.name: data} + + +class HistogramFacet(Facet): + _internal_name = 
"histogram" + + def __init__(self, name, + field=None, interval=None, time_interval=None, + key_field=None, value_field=None, + key_script=None, value_script=None, params=None, + **kwargs): + super(HistogramFacet, self).__init__(**kwargs) + self.name = name + self.field = field + self.interval = interval + self.time_interval = time_interval + self.key_field = key_field + self.value_field = value_field + self.key_script = key_script + self.value_script = value_script + self.params = params + + def _add_interval(self, data): + if self.interval: + data['interval'] = self.interval + elif self.time_interval: + data['time_interval'] = self.time_interval + else: + raise RuntimeError("Invalid field: interval or time_interval required") + + def serialize(self): + data = {} + if self.field: + data['field'] = self.field + self._add_interval(data) + elif self.key_field: + data['key_field'] = self.key_field + if self.value_field: + data['value_field'] = self.value_field + else: + raise RuntimeError("Invalid key_field: value_field required") + self._add_interval(data) + elif self.key_script: + data['key_script'] = self.key_script + if self.value_script: + data['value_script'] = self.value_script + else: + raise RuntimeError("Invalid key_script: value_script required") + if self.params: + data['params'] = self.params + if self.interval: + data['interval'] = self.interval + elif self.time_interval: + data['time_interval'] = self.time_interval + params = self._base_parameters() + params[self._internal_name]= data + return {self.name: params} + + +class DateHistogramFacet(Facet): + _internal_name = "date_histogram" + + def __init__(self, name, + field=None, interval=None, zone=None, + key_field=None, value_field=None, + value_script=None, params=None, **kwargs): + super(DateHistogramFacet, self).__init__(**kwargs) + self.name = name + self.field = field + self.interval = interval + self.zone = zone + self.key_field = key_field + self.value_field = value_field + self.value_script = 
value_script + self.params = params + + def serialize(self): + data = {} + + if self.interval: + data['interval'] = self.interval + if self.zone: + data['zone'] = self.zone + else: + raise RuntimeError("interval required") + if self.field: + data['field'] = self.field + elif self.key_field: + data['key_field'] = self.key_field + if self.value_field: + data['value_field'] = self.value_field + elif self.value_script: + data['value_script'] = self.value_script + if self.params: + data['params'] = self.params + else: + raise RuntimeError("Invalid key_field: value_field or value_script required") + + facet = self._base_parameters() + facet[self._internal_name]= data + return {self.name: facet} + + +class RangeFacet(Facet): + _internal_name = "range" + + def __init__(self, name, + field=None, ranges=None, + key_field=None, value_field=None, + key_script=None, value_script=None, params=None, + **kwargs): + super(RangeFacet, self).__init__(**kwargs) + self.name = name + self.field = field + if ranges is None: + ranges = [] + self.ranges = ranges + self.key_field = key_field + self.value_field = value_field + self.key_script = key_script + self.value_script = value_script + self.params = params + + def serialize(self): + data = {} + + if not self.ranges: + raise RuntimeError("Invalid ranges") + data['ranges'] = self.ranges + + if self.field: + data['field'] = self.field + elif self.key_field: + data['key_field'] = self.key_field + if self.value_field: + data['value_field'] = self.value_field + else: + raise RuntimeError("Invalid key_field: value_field required") + elif self.key_script: + data['key_script'] = self.key_script + if self.value_script: + data['value_script'] = self.value_script + else: + raise RuntimeError("Invalid key_script: value_script required") + if self.params: + data['params'] = self.params + + params = self._base_parameters() + params[self._internal_name]= data + return {self.name: params} + + +class GeoDistanceFacet(RangeFacet): + _internal_name = 
"geo_distance" + + +class StatisticalFacet(Facet): + _internal_name = "statistical" + + def __init__(self, name, field=None, script=None, params=None, **kwargs): + super(StatisticalFacet, self).__init__(**kwargs) + self.name = name + self.field = field + self.script = script + self.params = params + + def serialize(self): + data = {} + + if self.field: + data['field'] = self.field + elif self.script: + data['script'] = self.script + if self.params: + data['params'] = self.params + + params = self._base_parameters() + params[self._internal_name]= data + return {self.name: params} + + +class TermFacet(Facet): + _internal_name = "terms" + + def __init__(self, field=None, fields=None, name=None, size=10, + order=None, exclude=None, + regex=None, regex_flags="DOTALL", + script=None, **kwargs): + super(TermFacet, self).__init__(**kwargs) + self.name = name + self.field = field + self.fields = fields + if name is None: + self.name = field + self.size = size + self.order = order + self.exclude = exclude or [] + self.regex = regex + self.regex_flags = regex_flags + self.script = script + + def serialize(self): + if self.fields: + data = {'fields': self.fields} + else: + if self.field: + data = {'field': self.field} + else: + raise RuntimeError("Field or Fields is required:%s" % self.order) + + if self.size: + data['size'] = self.size + + if self.order: + if self.order not in ['count', 'term', 'reverse_count', 'reverse_term']: + raise RuntimeError("Invalid order value:%s" % self.order) + data['order'] = self.order + if self.exclude: + data['exclude'] = self.exclude + if self.regex: + data['regex'] = self.regex + if self.regex_flags: + data['regex_flags'] = self.regex_flags + elif self.script: + data['script'] = self.script + params = self._base_parameters() + params[self._internal_name]= data + return {self.name: params} + + +class TermStatsFacet(Facet): + _internal_name = "terms_stats" + + def __init__(self, name, size=10, order=None, + key_field=None, value_field=None, + 
key_script=None, value_script=None, params=None, + **kwargs): + super(TermStatsFacet, self).__init__(**kwargs) + self.name = name + self.size = size + self.ORDER_VALUES = ['term', 'reverse_term', 'count', 'reverse_count', 'total', + 'reverse_total', 'min', 'reverse_min', 'max', 'reverse_max'] + self.order = order if order is not None else self.ORDER_VALUES[0] + self.key_field = key_field + self.value_field = value_field + self.key_script = key_script + self.value_script = value_script + self.params = params + + def serialize(self): + data = {} + + if self.size: + data['size'] = self.size + + if self.order: + if self.order not in self.ORDER_VALUES: + raise RuntimeError("Invalid order value:%s" % self.order) + data['order'] = self.order + + if self.key_field: + data['key_field'] = self.key_field + if self.value_field: + data['value_field'] = self.value_field + else: + raise RuntimeError("Invalid key_field: value_field required") + elif self.key_script: + data['key_script'] = self.key_script + if self.value_script: + data['value_script'] = self.value_script + else: + raise RuntimeError("Invalid key_script: value_script required") + if self.params: + data['params'] = self.params + + params = self._base_parameters() + params[self._internal_name]= data + return {self.name: params} + +class FacetFilter(Filter): + @property + def q(self): + res = {"facet_filter": self.serialize()} + return res + + +class TermFacetFilter(TermFilter, FacetFilter): + pass + + +class TermsFacetFilter(TermsFilter, FacetFilter): + pass + + +class ANDFacetFilter(ANDFilter, FacetFilter): + pass + + +class NotFacetFilter(NotFilter, FacetFilter): + pass + + +class FacetQueryWrap(EqualityComparableUsingAttributeDictionary): + def __init__(self, wrap_object, **kwargs): + """ + Base Object for every Filter Object + """ + self.wrap_object = wrap_object + + def serialize(self): + return {"query": self.wrap_object.serialize()} + + + diff --git a/src/archivematicaCommon/lib/externals/pyes/fakettypes.py 
b/src/archivematicaCommon/lib/externals/pyes/fakettypes.py new file mode 100644 index 0000000000..66e1ae2ada --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/fakettypes.py @@ -0,0 +1,220 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import + +# +# Fake ttypes to use in http protocol to simulate thrift ones +# + +class Method(object): + GET = 0 + PUT = 1 + POST = 2 + DELETE = 3 + HEAD = 4 + OPTIONS = 5 + + _VALUES_TO_NAMES = { + 0: "GET", + 1: "PUT", + 2: "POST", + 3: "DELETE", + 4: "HEAD", + 5: "OPTIONS", + } + + _NAMES_TO_VALUES = { + "GET": 0, + "PUT": 1, + "POST": 2, + "DELETE": 3, + "HEAD": 4, + "OPTIONS": 5, + } + + +class Status(object): + CONTINUE = 100 + SWITCHING_PROTOCOLS = 101 + OK = 200 + CREATED = 201 + ACCEPTED = 202 + NON_AUTHORITATIVE_INFORMATION = 203 + NO_CONTENT = 204 + RESET_CONTENT = 205 + PARTIAL_CONTENT = 206 + MULTI_STATUS = 207 + MULTIPLE_CHOICES = 300 + MOVED_PERMANENTLY = 301 + FOUND = 302 + SEE_OTHER = 303 + NOT_MODIFIED = 304 + USE_PROXY = 305 + TEMPORARY_REDIRECT = 307 + BAD_REQUEST = 400 + UNAUTHORIZED = 401 + PAYMENT_REQUIRED = 402 + FORBIDDEN = 403 + NOT_FOUND = 404 + METHOD_NOT_ALLOWED = 405 + NOT_ACCEPTABLE = 406 + PROXY_AUTHENTICATION = 407 + REQUEST_TIMEOUT = 408 + CONFLICT = 409 + GONE = 410 + LENGTH_REQUIRED = 411 + PRECONDITION_FAILED = 412 + REQUEST_ENTITY_TOO_LARGE = 413 + REQUEST_URI_TOO_LONG = 414 + UNSUPPORTED_MEDIA_TYPE = 415 + REQUESTED_RANGE_NOT_SATISFIED = 416 + EXPECTATION_FAILED = 417 + UNPROCESSABLE_ENTITY = 422 + LOCKED = 423 + FAILED_DEPENDENCY = 424 + INTERNAL_SERVER_ERROR = 500 + NOT_IMPLEMENTED = 501 + BAD_GATEWAY = 502 + SERVICE_UNAVAILABLE = 503 + GATEWAY_TIMEOUT = 504 + INSUFFICIENT_STORAGE = 506 + + _VALUES_TO_NAMES = { + 100: "CONTINUE", + 101: "SWITCHING_PROTOCOLS", + 200: "OK", + 201: "CREATED", + 202: "ACCEPTED", + 203: "NON_AUTHORITATIVE_INFORMATION", + 204: "NO_CONTENT", + 205: "RESET_CONTENT", + 206: "PARTIAL_CONTENT", + 207: "MULTI_STATUS", + 300: "MULTIPLE_CHOICES", + 
301: "MOVED_PERMANENTLY", + 302: "FOUND", + 303: "SEE_OTHER", + 304: "NOT_MODIFIED", + 305: "USE_PROXY", + 307: "TEMPORARY_REDIRECT", + 400: "BAD_REQUEST", + 401: "UNAUTHORIZED", + 402: "PAYMENT_REQUIRED", + 403: "FORBIDDEN", + 404: "NOT_FOUND", + 405: "METHOD_NOT_ALLOWED", + 406: "NOT_ACCEPTABLE", + 407: "PROXY_AUTHENTICATION", + 408: "REQUEST_TIMEOUT", + 409: "CONFLICT", + 410: "GONE", + 411: "LENGTH_REQUIRED", + 412: "PRECONDITION_FAILED", + 413: "REQUEST_ENTITY_TOO_LARGE", + 414: "REQUEST_URI_TOO_LONG", + 415: "UNSUPPORTED_MEDIA_TYPE", + 416: "REQUESTED_RANGE_NOT_SATISFIED", + 417: "EXPECTATION_FAILED", + 422: "UNPROCESSABLE_ENTITY", + 423: "LOCKED", + 424: "FAILED_DEPENDENCY", + 500: "INTERNAL_SERVER_ERROR", + 501: "NOT_IMPLEMENTED", + 502: "BAD_GATEWAY", + 503: "SERVICE_UNAVAILABLE", + 504: "GATEWAY_TIMEOUT", + 506: "INSUFFICIENT_STORAGE", + } + + _NAMES_TO_VALUES = { + "CONTINUE": 100, + "SWITCHING_PROTOCOLS": 101, + "OK": 200, + "CREATED": 201, + "ACCEPTED": 202, + "NON_AUTHORITATIVE_INFORMATION": 203, + "NO_CONTENT": 204, + "RESET_CONTENT": 205, + "PARTIAL_CONTENT": 206, + "MULTI_STATUS": 207, + "MULTIPLE_CHOICES": 300, + "MOVED_PERMANENTLY": 301, + "FOUND": 302, + "SEE_OTHER": 303, + "NOT_MODIFIED": 304, + "USE_PROXY": 305, + "TEMPORARY_REDIRECT": 307, + "BAD_REQUEST": 400, + "UNAUTHORIZED": 401, + "PAYMENT_REQUIRED": 402, + "FORBIDDEN": 403, + "NOT_FOUND": 404, + "METHOD_NOT_ALLOWED": 405, + "NOT_ACCEPTABLE": 406, + "PROXY_AUTHENTICATION": 407, + "REQUEST_TIMEOUT": 408, + "CONFLICT": 409, + "GONE": 410, + "LENGTH_REQUIRED": 411, + "PRECONDITION_FAILED": 412, + "REQUEST_ENTITY_TOO_LARGE": 413, + "REQUEST_URI_TOO_LONG": 414, + "UNSUPPORTED_MEDIA_TYPE": 415, + "REQUESTED_RANGE_NOT_SATISFIED": 416, + "EXPECTATION_FAILED": 417, + "UNPROCESSABLE_ENTITY": 422, + "LOCKED": 423, + "FAILED_DEPENDENCY": 424, + "INTERNAL_SERVER_ERROR": 500, + "NOT_IMPLEMENTED": 501, + "BAD_GATEWAY": 502, + "SERVICE_UNAVAILABLE": 503, + "GATEWAY_TIMEOUT": 504, + 
"INSUFFICIENT_STORAGE": 506, + } + + +class RestRequest(object): + """ + Attributes: + - method + - uri + - parameters + - headers + - body + """ + + def __init__(self, method=None, uri=None, parameters=None, headers=None, body=None): + self.method = method + self.uri = uri + self.parameters = parameters + self.headers = headers + self.body = body + + def __repr__(self): + full_url = 'http://localhost:9200' + self.uri + if len(self.parameters) > 0: + full_url += '?' + for k, v in self.parameters: + full_url += k + '&' + v + + return "curl -X%s %s -d '%s'" % ( + Method._VALUES_TO_NAMES[self.method], + full_url, + self.body, + ) + + +class RestResponse(object): + """ + Attributes: + - status + - headers + - body + """ + + def __init__(self, status=None, headers=None, body=None): + self.status = status + self.headers = headers + self.body = body + + diff --git a/src/archivematicaCommon/lib/externals/pyes/filters.py b/src/archivematicaCommon/lib/externals/pyes/filters.py new file mode 100644 index 0000000000..2a6ff2bf51 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/filters.py @@ -0,0 +1,498 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import +import copy + +from .exceptions import QueryParameterError +from .utils import ESRange, EqualityComparableUsingAttributeDictionary +from .es import encode_json, json + +class Filter(EqualityComparableUsingAttributeDictionary): + _internal_name = "undefined" + + def __init__(self, _cache=None, _cache_key=None, **kwargs): + """ + Base Object for every Filter Object + """ + self._cache=_cache + self._cache_key=_cache_key + + def serialize(self): + raise NotImplementedError + + def _add_parameters(self, data): + """ + Extend the serializable dict adding global parameters if they are set + """ + if self._cache: + data["_cache"]=self._cache + if self._cache_key: + data["_cache_key"]=self._cache_key + return data + + @property + def q(self): + res = {"filter": self.serialize()} + return res + + def 
to_json(self): + return encode_json(self.q) + + +class FilterList(Filter): + def __init__(self, filters, **kwargs): + super(FilterList, self).__init__(**kwargs) + self.filters = filters + + def serialize(self): + if not self.filters: + raise RuntimeError("A least a filter must be declared") + return self._add_parameters({self._internal_name: [filter.serialize() for filter in self.filters]}) + + def __iter__(self): + return iter(self.filters) + + +class ANDFilter(FilterList): + """ + A filter that matches combinations of other filters using the AND operator + + Example: + + t1 = TermFilter('name', 'john') + t2 = TermFilter('name', 'smith') + f = ANDFilter([t1, t2]) + q = FilteredQuery(MatchAllQuery(), f) + results = conn.search(q) + + """ + _internal_name = "and" + + def __init__(self, *args, **kwargs): + super(ANDFilter, self).__init__(*args, **kwargs) + + +class BoolFilter(Filter): + """ + A filter that matches documents matching boolean combinations of other + queries. Similar in concept to Boolean query, except that the clauses are + other filters. Can be placed within queries that accept a filter. 
+ """ + + def __init__(self, must=None, must_not=None, should=None, + minimum_number_should_match=1, **kwargs): + super(BoolFilter, self).__init__(**kwargs) + + self._must = [] + self._must_not = [] + self._should = [] + self.minimum_number_should_match = minimum_number_should_match + if must: + self.add_must(must) + + if must_not: + self.add_must_not(must_not) + + if should: + self.add_should(should) + + def add_must(self, queries): + if isinstance(queries, list): + self._must.extend(queries) + else: + self._must.append(queries) + + def add_must_not(self, queries): + if isinstance(queries, list): + self._must_not.extend(queries) + else: + self._must_not.append(queries) + + def add_should(self, queries): + if isinstance(queries, list): + self._should.extend(queries) + else: + self._should.append(queries) + + def is_empty(self): + return not any([self._must, self._must_not, self._should]) + + def serialize(self): + filters = {} + if self._must: + filters['must'] = [f.serialize() for f in self._must] + if self._must_not: + filters['must_not'] = [f.serialize() for f in self._must_not] + if self._should: + filters['should'] = [f.serialize() for f in self._should] + filters['minimum_number_should_match'] = self.minimum_number_should_match + if not filters: + raise RuntimeError("A least a filter must be declared") + return self._add_parameters({"bool": filters}) + + +class ORFilter(FilterList): + """ + A filter that matches combinations of other filters using the OR operator + + Example: + + t1 = TermFilter('name', 'john') + t2 = TermFilter('name', 'smith') + f = ORFilter([t1, t2]) + q = FilteredQuery(MatchAllQuery(), f) + results = conn.search(q) + + """ + _internal_name = "or" + + def __init__(self, *args, **kwargs): + super(ORFilter, self).__init__(*args, **kwargs) + + +class NotFilter(Filter): + _internal_name = "not" + + def __init__(self, filter, **kwargs): + super(NotFilter, self).__init__(**kwargs) + self.filter = filter + + def serialize(self): + if not 
isinstance(self.filter, Filter): + raise RuntimeError("NotFilter argument should be a Filter") + return self._add_parameters({self._internal_name: {"filter": self.filter.serialize()}}) + + +class RangeFilter(Filter): + def __init__(self, qrange=None, **kwargs): + super(RangeFilter, self).__init__(**kwargs) + + self.ranges = [] + if qrange: + self.add(qrange) + + def add(self, qrange): + if isinstance(qrange, list): + self.ranges.extend(qrange) + elif isinstance(qrange, ESRange): + self.ranges.append(qrange) + + def serialize(self): + if not self.ranges: + raise RuntimeError("A least a range must be declared") + filters = dict([r.serialize() for r in self.ranges]) + return self._add_parameters({"range": filters}) + +NumericRangeFilter = RangeFilter + +class PrefixFilter(Filter): + _internal_name = "prefix" + + def __init__(self, field=None, prefix=None, **kwargs): + super(PrefixFilter, self).__init__(**kwargs) + self._values = {} + + if field is not None and prefix is not None: + self.add(field, prefix) + + def add(self, field, prefix): + self._values[field] = prefix + + def serialize(self): + if not self._values: + raise RuntimeError("A least a field/prefix pair must be added") + return self._add_parameters({self._internal_name: self._values}) + + +class ScriptFilter(Filter): + _internal_name = "script" + + def __init__(self, script, params=None, **kwargs): + super(ScriptFilter, self).__init__(**kwargs) + self.script = script + self.params = params + + + def add(self, field, value): + self.params[field] = {'value': value} + + def serialize(self): + data = {'script': self.script} + if self.params is not None: + data['params'] = self.params + return self._add_parameters({self._internal_name: data}) + + +class TermFilter(Filter): + _internal_name = "term" + + def __init__(self, field=None, value=None, _name=None, **kwargs): + super(TermFilter, self).__init__(**kwargs) + self._values = {} + self._name = _name + + if field is not None and value is not None: + 
self.add(field, value) + + def add(self, field, value): + self._values[field] = value + + def serialize(self): + if not self._values: + raise RuntimeError("A least a field/value pair must be added") + result = {self._internal_name: self._values} + if self._name: + result[self._internal_name]['_name'] = self._name + return self._add_parameters({self._internal_name: self._values}) + + +class ExistsFilter(TermFilter): + _internal_name = "exists" + + def __init__(self, field=None, **kwargs): + super(ExistsFilter, self).__init__(field="field", value=field, **kwargs) + + +class MissingFilter(TermFilter): + _internal_name = "missing" + + def __init__(self, field=None, **kwargs): + super(MissingFilter, self).__init__(field="field", value=field, **kwargs) + + +class RegexTermFilter(Filter): + _internal_name = "regex_term" + + def __init__(self, field=None, value=None, **kwargs): + super(RegexTermFilter, self).__init__(**kwargs) + self._values = {} + + if field is not None and value is not None: + self.add(field, value) + + def add(self, field, value): + self._values[field] = value + + def serialize(self): + if not self._values: + raise RuntimeError("A least a field/value pair must be added") + return self._add_parameters({self._internal_name: self._values}) + +class LimitFilter(Filter): + _internal_name = "limit" + + def __init__(self, value=100, **kwargs): + super(LimitFilter, self).__init__(**kwargs) + self.value=value + + def serialize(self): + return self._add_parameters({self._internal_name: {"value":self.value}}) + +class TermsFilter(Filter): + _internal_name = "terms" + + def __init__(self, field=None, values=None, _name=None, execution=None, **kwargs): + super(TermsFilter, self).__init__(**kwargs) + self._values = {} + self._name = _name + self.execution = execution + + if field is not None and values is not None: + self.add(field, values) + + def add(self, field, values): + self._values[field] = values + + def serialize(self): + if not self._values: + raise 
RuntimeError("A least a field/value pair must be added") + data = copy.deepcopy(self._values) + if self.execution: + data['execution'] = self.execution + if self._name: + data['_name'] = self._name + return self._add_parameters({self._internal_name: data}) + + +class QueryFilter(Filter): + _internal_name = "query" + + def __init__(self, query, **kwargs): + super(QueryFilter, self).__init__(**kwargs) + self._query = query + + def serialize(self): + if not self._query: + raise RuntimeError("A least a field/value pair must be added") + return self._add_parameters({self._internal_name: self._query.serialize()}) + +# +#--- Geo Queries +#http://www.elasticsearch.com/blog/2010/08/16/geo_location_and_search.html + +class GeoDistanceFilter(Filter): + """ + + http://github.com/elasticsearch/elasticsearch/issues/279 + + """ + _internal_name = "geo_distance" + + def __init__(self, field, location, distance, distance_type="arc", distance_unit=None, **kwargs): + super(GeoDistanceFilter, self).__init__(**kwargs) + self.field = field + self.location = location + self.distance = distance + self.distance_type = distance_type + self.distance_unit = distance_unit + + def serialize(self): + if self.distance_type not in ["arc", "plane"]: + raise QueryParameterError("Invalid distance_type") + + params = {"distance": self.distance, self.field: self.location} + if self.distance_type != "arc": + params['distance_type'] = self.distance_type + + if self.distance_unit: + if self.distance_unit not in ["km", "mi", "miles"]: + raise QueryParameterError("Invalid distance_unit") + params['distance_unit'] = self.distance_unit + + return self._add_parameters({self._internal_name: params}) + + +class GeoBoundingBoxFilter(Filter): + """ + + http://github.com/elasticsearch/elasticsearch/issues/290 + + """ + _internal_name = "geo_bounding_box" + + def __init__(self, field, location_tl, location_br, **kwargs): + super(GeoBoundingBoxFilter, self).__init__(**kwargs) + self.field = field + self.location_tl = 
location_tl + self.location_br = location_br + + def serialize(self): + return self._add_parameters({self._internal_name: { + self.field: { + "top_left": self.location_tl, + "bottom_right": self.location_br + } + } + }) + + +class GeoPolygonFilter(Filter): + """ + + http://github.com/elasticsearch/elasticsearch/issues/294 + + """ + _internal_name = "geo_polygon" + + def __init__(self, field, points, **kwargs): + super(GeoPolygonFilter, self).__init__(**kwargs) + self.field = field + self.points = points + + def serialize(self): + return self._add_parameters({self._internal_name: { + self.field: { + "points": self.points, + } + } + }) + + +class MatchAllFilter(Filter): + """ + A filter that matches on all documents + """ + _internal_name = "match_all" + + def __init__(self, **kwargs): + super(MatchAllFilter, self).__init__(**kwargs) + + def serialize(self): + return self._add_parameters({self._internal_name: {}}) + + +class HasChildFilter(Filter): + """ + The has_child filter accepts a query and the child type to run against, + and results in parent documents that have child docs matching the query + """ + _internal_name = "has_child" + + def __init__(self, type, filter, _scope=None, **kwargs): + super(HasChildFilter, self).__init__(**kwargs) + self.filter = filter + self.type = type + self._scope = _scope + + def serialize(self): + if not isinstance(self.filter, Filter): + raise RuntimeError("NotFilter argument should be a Filter") + data = {"query": self.filter.serialize(), + "type": self.type} + if self._scope is not None: + data['_scope'] = self._scope + return self._add_parameters({self._internal_name: data}) + + +class NestedFilter(Filter): + """ + A nested filter, works in a similar fashion to the nested query, except + used as a filter. It follows exactly the same structure, but also allows + to cache the results (set _cache to true), and have it named + (set the _name value). 
""" + _internal_name = "nested" + + def __init__(self, path, filter, **kwargs): + super(NestedFilter, self).__init__(**kwargs) + self.path = path + self.filter = filter + + def serialize(self): + data = { + 'path': self.path, + 'query': self.filter.serialize()} + return self._add_parameters({self._internal_name: data}) + + +class IdsFilter(Filter): + _internal_name = "ids" + def __init__(self, values, type=None, **kwargs): + super(IdsFilter, self).__init__(**kwargs) + self.type = type + self.values = values + + def serialize(self): + data = {} + if self.type: + data['type'] = self.type + if isinstance(self.values, basestring): + data['values'] = [self.values] + else: + data['values'] = self.values + + return self._add_parameters({self._internal_name: data}) + + +class RawFilter(Filter): + """ + Uses exactly the filter provided as an ES filter. + """ + + def __init__(self, filter_text_or_dict, **kwargs): + super(RawFilter, self).__init__(**kwargs) + if isinstance(filter_text_or_dict, basestring): + self._filter = json.loads(filter_text_or_dict) + else: + self._filter = filter_text_or_dict + + def serialize(self): + return self._filter diff --git a/src/archivematicaCommon/lib/externals/pyes/helpers.py b/src/archivematicaCommon/lib/externals/pyes/helpers.py new file mode 100644 index 0000000000..6be568119f --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/helpers.py @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- + +class SettingsBuilder(object): + def __init__(self, settings=None, mappings=None): + self.settings = settings or {'index.number_of_replicas': 1, + "index.number_of_shards": 5} + self.mappings = mappings or {} + + def add_mapping(self, data, name=None): + """ + Add a new mapping + """ + if name: + self.mappings[name] = data + else: + if isinstance(data, dict): + self.mappings.update(data) + elif isinstance(data, list): + for d in data: + self.mappings.update(d) + + def as_dict(self): + """Returns a dict""" + return {"settings": self.settings, + 
"mappings": self.mappings} diff --git a/src/archivematicaCommon/lib/externals/pyes/highlight.py b/src/archivematicaCommon/lib/externals/pyes/highlight.py new file mode 100644 index 0000000000..99dfc80cfe --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/highlight.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- + +class HighLighter(object): + """ + This object manage the highlighting + + :arg pre_tags: list of tags before the highlighted text. + importance is ordered.. ex. ``['<b>']`` + :arg post_tags: list of end tags after the highlighted text. + should line up with pre_tags. ex. ``['</b>']`` + :arg fields: list of fields to highlight + :arg fragment_size: the size of the grament + :arg number_or_fragments: the maximum number of fragments to + return; if 0, then no fragments are returned and instead the + entire field is returned and highlighted. + :arg fragment_offset: controls the margin to highlight from + + Use this with a :py:class:`pyes.query.Search` like this:: + + h = HighLighter(['<b>'], ['</b>']) + s = Search(TermQuery('foo'), highlight=h) + """ + + def __init__(self, pre_tags=None, post_tags=None, fields=None, fragment_size=None, number_of_fragments=None, + fragment_offset=None): + self.pre_tags = pre_tags + self.post_tags = post_tags + self.fields = fields or {} + self.fragment_size = fragment_size + self.number_of_fragments = number_of_fragments + self.fragment_offset = fragment_offset + + def add_field(self, name, fragment_size=150, number_of_fragments=3, fragment_offset=None): + """ + Add a field to Highlinghter + """ + data = {} + if fragment_size: + data['fragment_size'] = fragment_size + if number_of_fragments is not None: + data['number_of_fragments'] = number_of_fragments + if fragment_offset is not None: + data['fragment_offset'] = fragment_offset + self.fields[name] = data + + def serialize(self): + res = {} + if self.pre_tags: + res["pre_tags"] = self.pre_tags + if self.post_tags: + res["post_tags"] = self.post_tags + if 
self.fragment_size: + res["fragment_size"] = self.fragment_size + if self.number_of_fragments: + res["number_of_fragments"] = self.number_of_fragments + if self.fragment_offset: + res["fragment_offset"] = self.fragment_offset + if self.fields: + res["fields"] = self.fields + else: + res["fields"] = {"_all": {}} + return res diff --git a/src/archivematicaCommon/lib/externals/pyes/mappings.py b/src/archivematicaCommon/lib/externals/pyes/mappings.py new file mode 100644 index 0000000000..7b803f6bac --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/mappings.py @@ -0,0 +1,524 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import + +import threading + +_thread_locals = threading.local() +#store threadsafe data +from .utils import keys_to_string + +check_values = { + 'index': ['no', 'analyzed', 'not_analyzed'], + 'term_vector': ['no', 'yes', 'with_offsets', 'with_positions', 'with_positions_offsets'], + 'type': ['float', 'double', 'short', 'integer', 'long'], + 'store': ['yes', 'no'], + 'index_analyzer': [], + 'search_analyzer': [], + } + + +class AbstractField(object): + def __init__(self, index="not_analyzed", store="no", boost=1.0, + term_vector="no", omit_norms=True, + omit_term_freq_and_positions=True, + type=None, index_name=None, + analyzer=None, + index_analyzer=None, + search_analyzer=None, + name=None): + self.store = store + self.boost = boost + self.term_vector = term_vector + self.index = index + self.omit_norms = omit_norms + self.omit_term_freq_and_positions = omit_term_freq_and_positions + self.index_name = index_name + self.type = type + self.analyzer = analyzer + self.index_analyzer = index_analyzer + self.search_analyzer = search_analyzer + self.name = name + + def as_dict(self): + result = {"type": self.type, + 'index': self.index} + if self.store != "no": + if isinstance(self.store, bool): + if self.store: + result['store'] = "yes" + else: + result['store'] = "no" + else: + result['store'] = self.store + if self.boost != 
1.0: + result['boost'] = self.boost + if self.term_vector != "no": + result['term_vector'] = self.term_vector + if self.omit_norms != True: + result['omit_norms'] = self.omit_norms + if self.omit_term_freq_and_positions != True: + result['omit_term_freq_and_positions'] = self.omit_term_freq_and_positions + if self.index_name: + result['index_name'] = self.index_name + if self.analyzer: + result['analyzer'] = self.analyzer + if self.index_analyzer: + result['index_analyzer'] = self.index_analyzer + if self.search_analyzer: + result['search_analyzer'] = self.search_analyzer + + return result + + +class StringField(AbstractField): + def __init__(self, null_value=None, include_in_all=None, *args, **kwargs): + super(StringField, self).__init__(*args, **kwargs) + self.null_value = null_value + self.include_in_all = include_in_all + self.type = "string" + + def as_dict(self): + result = super(StringField, self).as_dict() + if self.null_value is not None: + result['null_value'] = self.null_value + if self.include_in_all is not None: + result['include_in_all'] = self.include_in_all + return result + + +class GeoPointField(AbstractField): + def __init__(self, null_value=None, include_in_all=None, + lat_lon=None, geohash=None, geohash_precision=None, + *args, **kwargs): + super(GeoPointField, self).__init__(*args, **kwargs) + self.null_value = null_value + self.include_in_all = include_in_all + self.lat_lon = lat_lon + self.geohash = geohash + self.geohash_precision = geohash_precision + self.type = "geo_point" + + def as_dict(self): + result = super(GeoPointField, self).as_dict() + if self.null_value is not None: + result['null_value'] = self.null_value + if self.include_in_all is not None: + result['include_in_all'] = self.include_in_all + if self.lat_lon is not None: + result['lat_lon'] = self.lat_lon + if self.geohash is not None: + result['geohash'] = self.geohash + if self.geohash_precision is not None: + try: + int(self.geohash_precision) + except ValueError: + raise 
ValueError("geohash_precision must be an integer") + result['geohash_precision'] = self.geohash_precision + return result + + +class NumericFieldAbstract(AbstractField): + def __init__(self, null_value=None, include_in_all=None, precision_step=4, + numeric_resolution=None, **kwargs): + super(NumericFieldAbstract, self).__init__(**kwargs) + self.null_value = null_value + self.include_in_all = include_in_all + self.precision_step = precision_step + self.numeric_resolution = numeric_resolution + + def as_dict(self): + result = super(NumericFieldAbstract, self).as_dict() + if self.null_value is not None: + result['null_value'] = self.null_value + if self.include_in_all is not None: + result['include_in_all'] = self.include_in_all + if self.precision_step != 4: + result['precision_step'] = self.precision_step + if self.numeric_resolution: + result['numeric_resolution'] = self.numeric_resolution + return result + + +class IpField(NumericFieldAbstract): + def __init__(self, *args, **kwargs): + super(IpField, self).__init__(*args, **kwargs) + self.type = "ip" + + +class ShortField(NumericFieldAbstract): + def __init__(self, *args, **kwargs): + super(ShortField, self).__init__(*args, **kwargs) + self.type = "short" + + +class IntegerField(NumericFieldAbstract): + def __init__(self, *args, **kwargs): + super(IntegerField, self).__init__(*args, **kwargs) + self.type = "integer" + + +class LongField(NumericFieldAbstract): + def __init__(self, *args, **kwargs): + super(LongField, self).__init__(*args, **kwargs) + self.type = "long" + + +class FloatField(NumericFieldAbstract): + def __init__(self, *args, **kwargs): + super(FloatField, self).__init__(*args, **kwargs) + self.type = "float" + + +class DoubleField(NumericFieldAbstract): + def __init__(self, *args, **kwargs): + super(DoubleField, self).__init__(*args, **kwargs) + self.type = "double" + + +class DateField(NumericFieldAbstract): + def __init__(self, format=None, **kwargs): + super(DateField, self).__init__(**kwargs) + 
self.format = format + self.type = "date" + + def as_dict(self): + result = super(DateField, self).as_dict() + if self.format: + result['format'] = self.format + return result + + +class BooleanField(AbstractField): + def __init__(self, null_value=None, include_in_all=None, *args, **kwargs): + super(BooleanField, self).__init__(*args, **kwargs) + self.null_value = null_value + self.include_in_all = include_in_all + self.type = "boolean" + + def as_dict(self): + result = super(BooleanField, self).as_dict() + if self.null_value is not None: + result['null_value'] = self.null_value + if self.include_in_all is not None: + result['include_in_all'] = self.include_in_all + return result + + +class MultiField(object): + def __init__(self, name, type=None, path=None, fields=None): + self.name = name + self.type = "multi_field" + self.path = path + self.fields = {} + if fields: + if isinstance(fields, dict): + self.fields = dict([(name, get_field(name, data)) for name, data in fields.items()]) + elif isinstance(fields, list): + for field in fields: + self.fields[field.name] = field.as_dict() + + def as_dict(self): + result = {"type": self.type, + "fields": {}} + if self.fields: + for name, value in self.fields.items(): + if isinstance(value, dict): + result['fields'][name] = value + else: + result['fields'][name] = value.as_dict() + if self.path: + result['path'] = self.path + return result + + +class AttachmentField(object): + """An attachment field. + + Requires the mapper-attachments plugin to be installed to be used. 
+ + """ + + def __init__(self, name, type=None, path=None, fields=None): + self.name = name + self.type = "attachment" + self.path = path + self.fields = dict([(name, get_field(name, data)) for name, data in fields.items()]) + + def as_dict(self): + result_fields = dict((name, value.as_dict()) + for (name, value) in self.fields.items()) + result = dict(type=self.type, fields=result_fields) + if self.path: + result['path'] = self.path + return result + + +class ObjectField(object): + def __init__(self, name=None, type=None, path=None, properties=None, + dynamic=None, enabled=None, include_in_all=None, dynamic_templates=None, + include_in_parent=None, include_in_root=None, + connection=None, index_name=None): + self.name = name + self.type = "object" + self.path = path + self.properties = properties + self.include_in_all = include_in_all + self.dynamic = dynamic + self.dynamic_templates = dynamic_templates or [] + self.enabled = enabled + self.include_in_all = include_in_all + self.include_in_parent = include_in_parent + self.include_in_root = include_in_root + self.connection = connection + self.index_name = index_name + if properties: + self.properties = dict([(name, get_field(name, data)) for name, data in properties.items()]) + else: + self.properties = {} + + def add_property(self, prop): + """ + Add a property to the object + """ + self.properties[prop.name] = prop + + def as_dict(self): + result = {"type": self.type, + "properties": {}} + if self.dynamic is not None: + result['dynamic'] = self.dynamic + if self.enabled is not None: + result['enabled'] = self.enabled + if self.include_in_all is not None: + result['include_in_all'] = self.include_in_all + if self.include_in_parent is not None: + result['include_in_parent'] = self.include_in_parent + if self.include_in_root is not None: + result['include_in_root'] = self.include_in_root + + if self.path is not None: + result['path'] = self.path + + if self.properties: + for name, value in self.properties.items(): 
+ result['properties'][name] = value.as_dict() + return result + + def __str__(self): + return str(self.as_dict()) + + def save(self): + if self.connection is None: + raise RuntimeError("No connection available") + + self.connection.put_mapping(doc_type=self.name, mapping=self.as_dict(), indices=self.index_name) + + +class NestedObject(ObjectField): + def __init__(self, *args, **kwargs): + super(NestedObject, self).__init__(*args, **kwargs) + self.type = "nested" + + +class DocumentObjectField(ObjectField): + def __init__(self, _all=None, _boost=None, _id=None, + _index=None, _source=None, _type=None, date_formats=None, _routing=None, _ttl=None, + _parent=None, _timestamp=None, _analyzer=None, _size=None, date_detection=None, + numeric_detection=None, dynamic_date_formats=None, *args, **kwargs): + super(DocumentObjectField, self).__init__(*args, **kwargs) + self._timestamp = _timestamp + self._all = _all + if self._all is None: + #tnp defaults + self._all = {"enabled": False} + + self._boost = _boost + self._id = _id + self._index = _index + self._source = _source + self._routing = _routing + self._ttl = _ttl + self._analyzer = _analyzer + self._size = _size + + self._type = _type + if self._type is None: + self._type = {"store": "yes"} + + self._parent = _parent + self.date_detection = date_detection + self.numeric_detection = numeric_detection + self.dynamic_date_formats = dynamic_date_formats + + def enable_compression(self, threshold="5kb"): + self._source.update({"compress": True, "compression_threshold": threshold}) + + def as_dict(self): + result = super(DocumentObjectField, self).as_dict() + result['_type'] = self._type + if self._all is not None: + result['_all'] = self._all + if self._source is not None: + result['_source'] = self._source + if self._boost is not None: + result['_boost'] = self._boost + if self._routing is not None: + result['_routing'] = self._routing + if self._ttl is not None: + result['_ttl'] = self._ttl + if self._id is not None: + 
result['_id'] = self._id + if self._timestamp is not None: + result['_timestamp'] = self._timestamp + if self._index is not None: + result['_index'] = self._index + if self._parent is not None: + result['_parent'] = self._parent + if self._analyzer is not None: + result['_analyzer'] = self._analyzer + if self._size is not None: + result['_size'] = self._size + + if self.date_detection is not None: + result['date_detection'] = self.date_detection + if self.numeric_detection is not None: + result['numeric_detection'] = self.numeric_detection + if self.dynamic_date_formats is not None: + result['dynamic_date_formats'] = self.dynamic_date_formats + + return result + + def add_property(self, prop): + """ + Add a property to the object + """ + self.properties[prop.name] = prop + + def __repr__(self): + return u"<DocumentObjectField:%s>" % self.name + + + def save(self): + if self.connection is None: + raise RuntimeError("No connection available") + self.connection.put_mapping(doc_type=self.name, mapping=self.as_dict(), indices=self.index_name) + + +def get_field(name, data, default="object", document_object_field=None): + """ + Return a valid Field by given data + """ + if isinstance(data, AbstractField): + return data + data = keys_to_string(data) + _type = data.get('type', default) + if _type == "string": + return StringField(name=name, **data) + elif _type == "boolean": + return BooleanField(name=name, **data) + elif _type == "short": + return ShortField(name=name, **data) + elif _type == "integer": + return IntegerField(name=name, **data) + elif _type == "long": + return LongField(name=name, **data) + elif _type == "float": + return FloatField(name=name, **data) + elif _type == "double": + return DoubleField(name=name, **data) + elif _type == "ip": + return IpField(name=name, **data) + elif _type == "date": + return DateField(name=name, **data) + elif _type == "multi_field": + return MultiField(name=name, **data) + elif _type == "geo_point": + return 
GeoPointField(name=name, **data) + elif _type == "attachment": + return AttachmentField(name=name, **data) + elif _type == "document": + if document_object_field: + return document_object_field(name=name, **data) + else: + return DocumentObjectField(name=name, **data) + + elif _type == "object": + if '_timestamp' in data or "_all" in data: + if document_object_field: + return document_object_field(name=name, **data) + else: + return DocumentObjectField(name=name, **data) + + return ObjectField(name=name, **data) + elif _type == "nested": + return NestedObject(name=name, **data) + raise RuntimeError("Invalid type: %s" % _type) + + +class Mapper(object): + def __init__(self, data, connection=None, is_mapping=False, document_object_field=None): + """ + Create a mapper object + + :param data: a dict containing the mappings + :param connection: a connection object + :param is_mapping: if it's a mapping or index/mapping + :param document_object_field: the kind of object to be used for document object Field + :return: + """ + self.indices = {} + self.mappings = {} + self.is_mapping = is_mapping + self.connection = connection + self.document_object_field = document_object_field + self._process(data) + + def _process(self, data): + """ + Process indexer data + """ + if self.is_mapping: + for docname, docdata in data.items(): + self.mappings[docname] = get_field(docname, docdata, "document", + document_object_field=self.document_object_field) + else: + for indexname, indexdata in data.items(): + self.indices[indexname] = {} + for docname, docdata in indexdata.items(): + o = get_field(docname, docdata, "document", + document_object_field=self.document_object_field) + o.connection = self.connection + o.index_name = indexname + self.indices[indexname][docname] = o + + def get_doctype(self, index, name): + """ + Returns a doctype given an index and a name + """ + return self.indices[index][name] + + def get_property(self, index, doctype, name): + """ + Returns a property of a 
given type + + :return a mapped property + """ + + return self.indices[index][doctype].properties[name] + +MAPPING_NAME_TYPE = { + "attachment": AttachmentField, + "boolean": BooleanField, + "date": DateField, + "double": DoubleField, + "float": FloatField, + "geopoint": GeoPointField, + "integer": IntegerField, + "ip": IpField, + "long": LongField, + "multifield": MultiField, + "nested": NestedObject, + "short": ShortField, + "string": StringField +} + diff --git a/src/archivematicaCommon/lib/externals/pyes/pyesthrift/Rest-remote b/src/archivematicaCommon/lib/externals/pyes/pyesthrift/Rest-remote new file mode 100644 index 0000000000..06bd6c3bd8 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/pyesthrift/Rest-remote @@ -0,0 +1,85 @@ +#!/usr/bin/env python +# +# Autogenerated by Thrift +# +# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING +# + +import sys +import pprint +from urlparse import urlparse +from thrift.transport import TTransport +from thrift.transport import TSocket +from thrift.transport import THttpClient +from thrift.protocol import TBinaryProtocol + +import Rest +from ttypes import * + +if len(sys.argv) <= 1 or sys.argv[1] == '--help': + print '' + print 'Usage: ' + sys.argv[0] + ' [-h host:port] [-u url] [-f[ramed]] function [arg1 [arg2...]]' + print '' + print 'Functions:' + print ' RestResponse execute(RestRequest request)' + print '' + sys.exit(0) + +pp = pprint.PrettyPrinter(indent = 2) +host = 'localhost' +port = 9090 +uri = '' +framed = False +http = False +argi = 1 + +if sys.argv[argi] == '-h': + parts = sys.argv[argi+1].split(':') + host = parts[0] + port = int(parts[1]) + argi += 2 + +if sys.argv[argi] == '-u': + url = urlparse(sys.argv[argi+1]) + parts = url[1].split(':') + host = parts[0] + if len(parts) > 1: + port = int(parts[1]) + else: + port = 80 + uri = url[2] + if url[4]: + uri += '?%s' % url[4] + http = True + argi += 2 + +if sys.argv[argi] == '-f' or sys.argv[argi] == '-framed': + framed = True + 
argi += 1 + +cmd = sys.argv[argi] +args = sys.argv[argi+1:] + +if http: + transport = THttpClient.THttpClient(host, port, uri) +else: + socket = TSocket.TSocket(host, port) + if framed: + transport = TTransport.TFramedTransport(socket) + else: + transport = TTransport.TBufferedTransport(socket) +protocol = TBinaryProtocol.TBinaryProtocol(transport) +client = Rest.Client(protocol) +transport.open() + +if cmd == 'execute': + if len(args) != 1: + print 'execute requires 1 args' + sys.exit(1) + pp.pprint(client.execute(eval(args[0]),)) + +else: + print 'Unrecognized method %s' % cmd + sys.exit(1) + +transport.close() diff --git a/src/archivematicaCommon/lib/externals/pyes/pyesthrift/Rest.py b/src/archivematicaCommon/lib/externals/pyes/pyesthrift/Rest.py new file mode 100644 index 0000000000..43476f7b5d --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/pyesthrift/Rest.py @@ -0,0 +1,219 @@ +# +# Autogenerated by Thrift +# +# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING +# + +from thrift.Thrift import * +from ttypes import * +from thrift.Thrift import TProcessor +from thrift.transport import TTransport +from thrift.protocol import TBinaryProtocol, TProtocol +try: + from thrift.protocol import fastbinary +except ImportError: + fastbinary = None + + +class Iface: + def execute(self, request): + """ + Parameters: + - request + """ + pass + + +class Client(Iface): + def __init__(self, iprot, oprot=None): + self._iprot = self._oprot = iprot + if oprot != None: + self._oprot = oprot + self._seqid = 0 + + def execute(self, request): + """ + Parameters: + - request + """ + self.send_execute(request) + return self.recv_execute() + + def send_execute(self, request): + self._oprot.writeMessageBegin('execute', TMessageType.CALL, self._seqid) + args = execute_args() + args.request = request + args.write(self._oprot) + self._oprot.writeMessageEnd() + self._oprot.trans.flush() + + def recv_execute(self): + (fname, mtype, rseqid) = 
self._iprot.readMessageBegin() + if mtype == TMessageType.EXCEPTION: + x = TApplicationException() + x.read(self._iprot) + self._iprot.readMessageEnd() + raise x + result = execute_result() + result.read(self._iprot) + self._iprot.readMessageEnd() + if result.success != None: + return result.success + raise TApplicationException(TApplicationException.MISSING_RESULT, "execute failed: unknown result"); + + +class Processor(Iface, TProcessor): + def __init__(self, handler): + self._handler = handler + self._processMap = {} + self._processMap["execute"] = Processor.process_execute + + def process(self, iprot, oprot): + (name, type, seqid) = iprot.readMessageBegin() + if name not in self._processMap: + iprot.skip(TType.STRUCT) + iprot.readMessageEnd() + x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name)) + oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid) + x.write(oprot) + oprot.writeMessageEnd() + oprot.trans.flush() + return + else: + self._processMap[name](self, seqid, iprot, oprot) + return True + + def process_execute(self, seqid, iprot, oprot): + args = execute_args() + args.read(iprot) + iprot.readMessageEnd() + result = execute_result() + result.success = self._handler.execute(args.request) + oprot.writeMessageBegin("execute", TMessageType.REPLY, seqid) + result.write(oprot) + oprot.writeMessageEnd() + oprot.trans.flush() + + +# HELPER FUNCTIONS AND STRUCTURES + +class execute_args: + """ + Attributes: + - request + """ + + thrift_spec = ( + None, # 0 + (1, TType.STRUCT, 'request', (RestRequest, RestRequest.thrift_spec), None, ), # 1 + ) + + def __init__(self, request=None): + self.request = request + + def read(self, iprot): + if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: + fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) + return + 
iprot.readStructBegin() + while True: + (fname, ftype, fid) = iprot.readFieldBegin() + if ftype == TType.STOP: + break + if fid == 1: + if ftype == TType.STRUCT: + self.request = RestRequest() + self.request.read(iprot) + else: + iprot.skip(ftype) + else: + iprot.skip(ftype) + iprot.readFieldEnd() + iprot.readStructEnd() + + def write(self, oprot): + if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: + oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) + return + oprot.writeStructBegin('execute_args') + if self.request != None: + oprot.writeFieldBegin('request', TType.STRUCT, 1) + self.request.write(oprot) + oprot.writeFieldEnd() + oprot.writeFieldStop() + oprot.writeStructEnd() + def validate(self): + if self.request is None: + raise TProtocol.TProtocolException(message='Required field request is unset!') + return + + + def __repr__(self): + L = ['%s=%r' % (key, value) + for key, value in self.__dict__.iteritems()] + return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) + + def __eq__(self, other): + return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ + + def __ne__(self, other): + return not (self == other) + +class execute_result: + """ + Attributes: + - success + """ + + thrift_spec = ( + (0, TType.STRUCT, 'success', (RestResponse, RestResponse.thrift_spec), None, ), # 0 + ) + + def __init__(self, success=None): + self.success = success + + def read(self, iprot): + if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: + fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) + return + iprot.readStructBegin() + while True: + (fname, ftype, fid) = iprot.readFieldBegin() + if ftype == TType.STOP: + break + if fid == 0: + if ftype == TType.STRUCT: + self.success = 
RestResponse() + self.success.read(iprot) + else: + iprot.skip(ftype) + else: + iprot.skip(ftype) + iprot.readFieldEnd() + iprot.readStructEnd() + + def write(self, oprot): + if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: + oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) + return + oprot.writeStructBegin('execute_result') + if self.success != None: + oprot.writeFieldBegin('success', TType.STRUCT, 0) + self.success.write(oprot) + oprot.writeFieldEnd() + oprot.writeFieldStop() + oprot.writeStructEnd() + def validate(self): + return + + + def __repr__(self): + L = ['%s=%r' % (key, value) + for key, value in self.__dict__.iteritems()] + return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) + + def __eq__(self, other): + return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ + + def __ne__(self, other): + return not (self == other) diff --git a/src/archivematicaCommon/lib/externals/pyes/pyesthrift/__init__.py b/src/archivematicaCommon/lib/externals/pyes/pyesthrift/__init__.py new file mode 100644 index 0000000000..bbc702ed80 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/pyesthrift/__init__.py @@ -0,0 +1 @@ +__all__ = ['ttypes', 'constants', 'Rest'] diff --git a/src/archivematicaCommon/lib/externals/pyes/pyesthrift/constants.py b/src/archivematicaCommon/lib/externals/pyes/pyesthrift/constants.py new file mode 100644 index 0000000000..2f17ec34fe --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/pyesthrift/constants.py @@ -0,0 +1,9 @@ +# +# Autogenerated by Thrift +# +# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING +# + +from thrift.Thrift import * +from ttypes import * + diff --git a/src/archivematicaCommon/lib/externals/pyes/pyesthrift/simple_test.py b/src/archivematicaCommon/lib/externals/pyes/pyesthrift/simple_test.py new file mode 100644 index 0000000000..798f7031e7 --- /dev/null 
+++ b/src/archivematicaCommon/lib/externals/pyes/pyesthrift/simple_test.py @@ -0,0 +1,30 @@ +#!/usr/bin/env python +import sys +import pprint +from urlparse import urlparse +from thrift.transport import TTransport +from thrift.transport import TSocket +from thrift.transport import THttpClient +from thrift.protocol import TBinaryProtocol + +import Rest +from ttypes import * + +pp = pprint.PrettyPrinter(indent = 4) +host = '127.0.0.1' +port = 9500 +uri = '' +framed = False +http = False +argi = 1 + +socket = TSocket.TSocket(host, port) +transport = TTransport.TBufferedTransport(socket) +protocol = TBinaryProtocol.TBinaryProtocol(transport) +client = Rest.Client(protocol) +transport.open() + +res = RestRequest(0, "/test-index/test-type/1", {}, {}) +print client.execute(res) + +transport.close() diff --git a/src/archivematicaCommon/lib/externals/pyes/pyesthrift/ttypes.py b/src/archivematicaCommon/lib/externals/pyes/pyesthrift/ttypes.py new file mode 100644 index 0000000000..df6eabaae0 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/pyesthrift/ttypes.py @@ -0,0 +1,408 @@ +# +# Autogenerated by Thrift +# +# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING +# + +from thrift.Thrift import * + +from thrift.transport import TTransport +from thrift.protocol import TBinaryProtocol, TProtocol +try: + from thrift.protocol import fastbinary +except ImportError: + fastbinary = None + + +class Method(object): + GET = 0 + PUT = 1 + POST = 2 + DELETE = 3 + HEAD = 4 + OPTIONS = 5 + + _VALUES_TO_NAMES = { + 0: "GET", + 1: "PUT", + 2: "POST", + 3: "DELETE", + 4: "HEAD", + 5: "OPTIONS", + } + + _NAMES_TO_VALUES = { + "GET": 0, + "PUT": 1, + "POST": 2, + "DELETE": 3, + "HEAD": 4, + "OPTIONS": 5, + } + +class Status(object): + CONT = 100 + SWITCHING_PROTOCOLS = 101 + OK = 200 + CREATED = 201 + ACCEPTED = 202 + NON_AUTHORITATIVE_INFORMATION = 203 + NO_CONTENT = 204 + RESET_CONTENT = 205 + PARTIAL_CONTENT = 206 + MULTI_STATUS = 207 + MULTIPLE_CHOICES = 300 + 
MOVED_PERMANENTLY = 301 + FOUND = 302 + SEE_OTHER = 303 + NOT_MODIFIED = 304 + USE_PROXY = 305 + TEMPORARY_REDIRECT = 307 + BAD_REQUEST = 400 + UNAUTHORIZED = 401 + PAYMENT_REQUIRED = 402 + FORBIDDEN = 403 + NOT_FOUND = 404 + METHOD_NOT_ALLOWED = 405 + NOT_ACCEPTABLE = 406 + PROXY_AUTHENTICATION = 407 + REQUEST_TIMEOUT = 408 + CONFLICT = 409 + GONE = 410 + LENGTH_REQUIRED = 411 + PRECONDITION_FAILED = 412 + REQUEST_ENTITY_TOO_LARGE = 413 + REQUEST_URI_TOO_LONG = 414 + UNSUPPORTED_MEDIA_TYPE = 415 + REQUESTED_RANGE_NOT_SATISFIED = 416 + EXPECTATION_FAILED = 417 + UNPROCESSABLE_ENTITY = 422 + LOCKED = 423 + FAILED_DEPENDENCY = 424 + INTERNAL_SERVER_ERROR = 500 + NOT_IMPLEMENTED = 501 + BAD_GATEWAY = 502 + SERVICE_UNAVAILABLE = 503 + GATEWAY_TIMEOUT = 504 + INSUFFICIENT_STORAGE = 506 + + _VALUES_TO_NAMES = { + 100: "CONT", + 101: "SWITCHING_PROTOCOLS", + 200: "OK", + 201: "CREATED", + 202: "ACCEPTED", + 203: "NON_AUTHORITATIVE_INFORMATION", + 204: "NO_CONTENT", + 205: "RESET_CONTENT", + 206: "PARTIAL_CONTENT", + 207: "MULTI_STATUS", + 300: "MULTIPLE_CHOICES", + 301: "MOVED_PERMANENTLY", + 302: "FOUND", + 303: "SEE_OTHER", + 304: "NOT_MODIFIED", + 305: "USE_PROXY", + 307: "TEMPORARY_REDIRECT", + 400: "BAD_REQUEST", + 401: "UNAUTHORIZED", + 402: "PAYMENT_REQUIRED", + 403: "FORBIDDEN", + 404: "NOT_FOUND", + 405: "METHOD_NOT_ALLOWED", + 406: "NOT_ACCEPTABLE", + 407: "PROXY_AUTHENTICATION", + 408: "REQUEST_TIMEOUT", + 409: "CONFLICT", + 410: "GONE", + 411: "LENGTH_REQUIRED", + 412: "PRECONDITION_FAILED", + 413: "REQUEST_ENTITY_TOO_LARGE", + 414: "REQUEST_URI_TOO_LONG", + 415: "UNSUPPORTED_MEDIA_TYPE", + 416: "REQUESTED_RANGE_NOT_SATISFIED", + 417: "EXPECTATION_FAILED", + 422: "UNPROCESSABLE_ENTITY", + 423: "LOCKED", + 424: "FAILED_DEPENDENCY", + 500: "INTERNAL_SERVER_ERROR", + 501: "NOT_IMPLEMENTED", + 502: "BAD_GATEWAY", + 503: "SERVICE_UNAVAILABLE", + 504: "GATEWAY_TIMEOUT", + 506: "INSUFFICIENT_STORAGE", + } + + _NAMES_TO_VALUES = { + "CONT": 100, + 
"SWITCHING_PROTOCOLS": 101, + "OK": 200, + "CREATED": 201, + "ACCEPTED": 202, + "NON_AUTHORITATIVE_INFORMATION": 203, + "NO_CONTENT": 204, + "RESET_CONTENT": 205, + "PARTIAL_CONTENT": 206, + "MULTI_STATUS": 207, + "MULTIPLE_CHOICES": 300, + "MOVED_PERMANENTLY": 301, + "FOUND": 302, + "SEE_OTHER": 303, + "NOT_MODIFIED": 304, + "USE_PROXY": 305, + "TEMPORARY_REDIRECT": 307, + "BAD_REQUEST": 400, + "UNAUTHORIZED": 401, + "PAYMENT_REQUIRED": 402, + "FORBIDDEN": 403, + "NOT_FOUND": 404, + "METHOD_NOT_ALLOWED": 405, + "NOT_ACCEPTABLE": 406, + "PROXY_AUTHENTICATION": 407, + "REQUEST_TIMEOUT": 408, + "CONFLICT": 409, + "GONE": 410, + "LENGTH_REQUIRED": 411, + "PRECONDITION_FAILED": 412, + "REQUEST_ENTITY_TOO_LARGE": 413, + "REQUEST_URI_TOO_LONG": 414, + "UNSUPPORTED_MEDIA_TYPE": 415, + "REQUESTED_RANGE_NOT_SATISFIED": 416, + "EXPECTATION_FAILED": 417, + "UNPROCESSABLE_ENTITY": 422, + "LOCKED": 423, + "FAILED_DEPENDENCY": 424, + "INTERNAL_SERVER_ERROR": 500, + "NOT_IMPLEMENTED": 501, + "BAD_GATEWAY": 502, + "SERVICE_UNAVAILABLE": 503, + "GATEWAY_TIMEOUT": 504, + "INSUFFICIENT_STORAGE": 506, + } + + +class RestRequest(object): + """ + Attributes: + - method + - uri + - parameters + - headers + - body + """ + + thrift_spec = ( + None, # 0 + (1, TType.I32, 'method', None, None, ), # 1 + (2, TType.STRING, 'uri', None, None, ), # 2 + (3, TType.MAP, 'parameters', (TType.STRING,None,TType.STRING,None), None, ), # 3 + (4, TType.MAP, 'headers', (TType.STRING,None,TType.STRING,None), None, ), # 4 + (5, TType.STRING, 'body', None, None, ), # 5 + ) + + def __init__(self, method=None, uri=None, parameters=None, headers=None, body=None): + self.method = method + self.uri = uri + self.parameters = parameters + self.headers = headers + self.body = body + + def read(self, iprot): + if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: + 
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) + return + iprot.readStructBegin() + while True: + (fname, ftype, fid) = iprot.readFieldBegin() + if ftype == TType.STOP: + break + if fid == 1: + if ftype == TType.I32: + self.method = iprot.readI32(); + else: + iprot.skip(ftype) + elif fid == 2: + if ftype == TType.STRING: + self.uri = iprot.readString(); + else: + iprot.skip(ftype) + elif fid == 3: + if ftype == TType.MAP: + self.parameters = {} + (_ktype1, _vtype2, _size0 ) = iprot.readMapBegin() + for _i4 in xrange(_size0): + _key5 = iprot.readString(); + _val6 = iprot.readString(); + self.parameters[_key5] = _val6 + iprot.readMapEnd() + else: + iprot.skip(ftype) + elif fid == 4: + if ftype == TType.MAP: + self.headers = {} + (_ktype8, _vtype9, _size7 ) = iprot.readMapBegin() + for _i11 in xrange(_size7): + _key12 = iprot.readString(); + _val13 = iprot.readString(); + self.headers[_key12] = _val13 + iprot.readMapEnd() + else: + iprot.skip(ftype) + elif fid == 5: + if ftype == TType.STRING: + self.body = iprot.readString(); + else: + iprot.skip(ftype) + else: + iprot.skip(ftype) + iprot.readFieldEnd() + iprot.readStructEnd() + + def write(self, oprot): + if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: + oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) + return + oprot.writeStructBegin('RestRequest') + if self.method != None: + oprot.writeFieldBegin('method', TType.I32, 1) + oprot.writeI32(self.method) + oprot.writeFieldEnd() + if self.uri != None: + oprot.writeFieldBegin('uri', TType.STRING, 2) + oprot.writeString(self.uri) + oprot.writeFieldEnd() + if self.parameters != None: + oprot.writeFieldBegin('parameters', TType.MAP, 3) + oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.parameters)) + for kiter14,viter15 in self.parameters.items(): + oprot.writeString(kiter14) + oprot.writeString(viter15) + 
oprot.writeMapEnd() + oprot.writeFieldEnd() + if self.headers != None: + oprot.writeFieldBegin('headers', TType.MAP, 4) + oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.headers)) + for kiter16,viter17 in self.headers.items(): + oprot.writeString(kiter16) + oprot.writeString(viter17) + oprot.writeMapEnd() + oprot.writeFieldEnd() + if self.body != None: + oprot.writeFieldBegin('body', TType.STRING, 5) + oprot.writeString(self.body) + oprot.writeFieldEnd() + oprot.writeFieldStop() + oprot.writeStructEnd() + def validate(self): + if self.method is None: + raise TProtocol.TProtocolException(message='Required field method is unset!') + if self.uri is None: + raise TProtocol.TProtocolException(message='Required field uri is unset!') + return + + + def __repr__(self): + L = ['%s=%r' % (key, value) + for key, value in self.__dict__.iteritems()] + return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) + + def __eq__(self, other): + return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ + + def __ne__(self, other): + return not (self == other) + +class RestResponse(object): + """ + Attributes: + - status + - headers + - body + """ + + thrift_spec = ( + None, # 0 + (1, TType.I32, 'status', None, None, ), # 1 + (2, TType.MAP, 'headers', (TType.STRING,None,TType.STRING,None), None, ), # 2 + (3, TType.STRING, 'body', None, None, ), # 3 + ) + + def __init__(self, status=None, headers=None, body=None): + self.status = status + self.headers = headers + self.body = body + + def read(self, iprot): + if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: + fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) + return + iprot.readStructBegin() + while True: + (fname, ftype, fid) = iprot.readFieldBegin() + if ftype == TType.STOP: + break + if fid == 1: + if ftype == TType.I32: + self.status = 
iprot.readI32(); + else: + iprot.skip(ftype) + elif fid == 2: + if ftype == TType.MAP: + self.headers = {} + (_ktype19, _vtype20, _size18 ) = iprot.readMapBegin() + for _i22 in xrange(_size18): + _key23 = iprot.readString(); + _val24 = iprot.readString(); + self.headers[_key23] = _val24 + iprot.readMapEnd() + else: + iprot.skip(ftype) + elif fid == 3: + if ftype == TType.STRING: + self.body = iprot.readString(); + else: + iprot.skip(ftype) + else: + iprot.skip(ftype) + iprot.readFieldEnd() + iprot.readStructEnd() + + def write(self, oprot): + if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: + oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) + return + oprot.writeStructBegin('RestResponse') + if self.status != None: + oprot.writeFieldBegin('status', TType.I32, 1) + oprot.writeI32(self.status) + oprot.writeFieldEnd() + if self.headers != None: + oprot.writeFieldBegin('headers', TType.MAP, 2) + oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.headers)) + for kiter25,viter26 in self.headers.items(): + oprot.writeString(kiter25) + oprot.writeString(viter26) + oprot.writeMapEnd() + oprot.writeFieldEnd() + if self.body != None: + oprot.writeFieldBegin('body', TType.STRING, 3) + oprot.writeString(self.body) + oprot.writeFieldEnd() + oprot.writeFieldStop() + oprot.writeStructEnd() + def validate(self): + if self.status is None: + raise TProtocol.TProtocolException(message='Required field status is unset!') + return + + + def __repr__(self): + L = ['%s=%r' % (key, value) + for key, value in self.__dict__.iteritems()] + return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) + + def __eq__(self, other): + return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ + + def __ne__(self, other): + return not (self == other) diff --git a/src/archivematicaCommon/lib/externals/pyes/query.py b/src/archivematicaCommon/lib/externals/pyes/query.py 
new file mode 100644 index 0000000000..452fd28eb5 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/query.py @@ -0,0 +1,1403 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import + +try: + # For Python >= 2.6 + import json +except ImportError: + # For Python < 2.6 or people using a newer version of simplejson + import simplejson as json + +from .utils import clean_string, ESRange, EqualityComparableUsingAttributeDictionary +from .facets import FacetFactory +from .highlight import HighLighter +from .scriptfields import ScriptFields +from .exceptions import InvalidQuery, InvalidParameterQuery, QueryError, ScriptFieldsError +from .es import ES + +class FieldParameter(EqualityComparableUsingAttributeDictionary): + def __init__(self, field, + query, + default_operator="OR", + analyzer=None, + allow_leading_wildcard=True, + lowercase_expanded_terms=True, + enable_position_increments=True, + fuzzy_prefix_length=0, + fuzzy_min_sim=0.5, + phrase_slop=0, + boost=1.0): + self.query = query + self.field = field + self.default_operator = default_operator + self.analyzer = analyzer + self.allow_leading_wildcard = allow_leading_wildcard + self.lowercase_expanded_terms = lowercase_expanded_terms + self.enable_position_increments = enable_position_increments + self.fuzzy_prefix_length = fuzzy_prefix_length + self.fuzzy_min_sim = fuzzy_min_sim + self.phrase_slop = phrase_slop + self.boost = boost + + def serialize(self): + filters = {} + + if self.default_operator != "OR": + filters["default_operator"] = self.default_operator + if self.analyzer: + filters["analyzer"] = self.analyzer + if not self.allow_leading_wildcard: + filters["allow_leading_wildcard"] = self.allow_leading_wildcard + if not self.lowercase_expanded_terms: + filters["lowercase_expanded_terms"] = self.lowercase_expanded_terms + if not self.enable_position_increments: + filters["enable_position_increments"] = self.enable_position_increments + if self.fuzzy_prefix_length: + 
filters["fuzzy_prefix_length"] = self.fuzzy_prefix_length + if self.fuzzy_min_sim != 0.5: + filters["fuzzy_min_sim"] = self.fuzzy_min_sim + if self.phrase_slop: + filters["phrase_slop"] = self.phrase_slop + + if self.boost != 1.0: + filters["boost"] = self.boost + if filters: + filters["query"] = self.query + else: + filters = self.query + return self.field, filters + + +class Search(EqualityComparableUsingAttributeDictionary): + """A search to be performed. + + This contains a query, and has additional parameters which are used to + control how the search works, what it should return, etc. + + Example: + + q = StringQuery('elasticsearch') + s = Search(q, fields=['title', 'author'], start=100, size=50) + results = conn.search(s) + + """ + + def __init__(self, query=None, filter=None, fields=None, start=None, size=None, highlight=None, sort=None, + explain=False, facet=None, version=None, track_scores=None, script_fields=None, index_boost=None, + min_score=None, stats=None, bulk_read=None, partial_fields=None): + """ + fields: if is [], the _source is not returned + """ + if not index_boost: index_boost = {} + self.query = query + self.filter = filter + self.fields = fields + self.start = start + self.size = size + self._highlight = highlight + self.sort = sort + self.explain = explain + self.facet = facet or FacetFactory() + self.version = version + self.track_scores = track_scores + self.script_fields = script_fields + self.index_boost = index_boost + self.min_score = min_score + self.stats = stats + self.bulk_read = bulk_read + self.partial_fields = partial_fields + + def get_facet_factory(self): + """ + Returns the facet factory + """ + return self.facet + + @property + def q(self): + return self.serialize() + + def serialize(self): + """Serialize the search to a structure as passed for a search body. 
+ + """ + res = {} + if self.query: + if isinstance(self.query, dict): + res["query"] = self.query + elif hasattr(self.query, "serialize"): + res["query"] = self.query.serialize() + else: + raise InvalidQuery("Invalid query") + if self.filter: + res['filter'] = self.filter.serialize() + if self.fields is not None: + res['fields'] = self.fields + if self.size is not None: + res['size'] = self.size + if self.start is not None: + res['from'] = self.start + if self._highlight: + res['highlight'] = self._highlight.serialize() + if self.sort: + res['sort'] = self.sort + if self.explain: + res['explain'] = self.explain + if self.version: + res['version'] = self.version + if self.track_scores: + res['track_scores'] = self.track_scores + if self.script_fields: + if isinstance(self.script_fields, ScriptFields): + res['script_fields'] = self.script_fields.serialize() + else: + raise ScriptFieldsError("Parameter script_fields should of type ScriptFields") + if self.index_boost: + res['indices_boost'] = self.index_boost + if self.min_score: + res['min_score'] = self.min_score + if self.facet.facets: + res.update(self.facet.q) + if self.stats: + res['stats'] = self.stats + if self.partial_fields: + res['partial_fields'] = self.partial_fields + return res + + @property + def highlight(self): + if self._highlight is None: + self._highlight = HighLighter("<b>", "</b>") + return self._highlight + + def add_highlight(self, field, fragment_size=None, + number_of_fragments=None, fragment_offset=None): + """Add a highlight field. + + The Search object will be returned, so calls to this can be chained. + + """ + if self._highlight is None: + self._highlight = HighLighter("<b>", "</b>") + self._highlight.add_field(field, fragment_size, number_of_fragments, fragment_offset) + return self + + def add_index_boost(self, index, boost): + """Add a boost on an index. + + The Search object will be returned, so calls to this can be chained. 
+ + """ + if boost is None: + if self.index_boost.has_key(index): + del(self.index_boost[index]) + else: + self.index_boost[index] = boost + return self + + def __repr__(self): + return str(self.q) + + def to_search_json(self): + """Convert the search to JSON. + + The output of this is suitable for using as the request body for + search. + + """ + return json.dumps(self.q, cls=ES.encoder) + + +class Query(EqualityComparableUsingAttributeDictionary): + """Base class for all queries. + + """ + + def __init__(self, *args, **kwargs): + if len(args) > 0 or len(kwargs) > 0: + raise RuntimeWarning("No all parameters are processed by derivated query object") + + def serialize(self): + """Serialize the query to a structure using the query DSL. + + """ + raise NotImplementedError() + + def search(self, **kwargs): + """Return this query wrapped in a Search object. + + Any keyword arguments supplied to this call will be passed to the + Search object. + + """ + return Search(query=self, **kwargs) + + def to_search_json(self): + """Convert the query to JSON suitable for searching with. + + The output of this is suitable for using as the request body for + search. + + """ + return json.dumps(dict(query=self.serialize()), cls=ES.encoder) + + def to_query_json(self): + """Convert the query to JSON using the query DSL. + + The output of this is suitable for using as the request body for count, + delete_by_query and reindex. + + """ + return json.dumps(self.serialize(), cls=ES.encoder) + + +class BoolQuery(Query): + """A boolean combination of other queries. + + BoolQuery maps to Lucene **BooleanQuery**. It is built using one or more + boolean clauses, each clause with a typed occurrence. The occurrence types + are: + + ================ ======================================================== + Occur Description + ================ ======================================================== + **must** The clause (query) must appear in matching documents. 
+ **should** The clause (query) should appear in the matching + document. A boolean query with no **must** clauses, one + or more **should** clauses must match a document. The + minimum number of should clauses to match can be set + using **minimum_number_should_match** parameter. + **must_not** The clause (query) must not appear in the matching + documents. Note that it is not possible to search on + documents that only consists of a **must_not** clauses. + ================ ======================================================== + + The bool query also supports **disable_coord** parameter (defaults to + **false**). + + """ + + def __init__(self, must=None, must_not=None, should=None, + boost=None, minimum_number_should_match=1, + disable_coord=None, + **kwargs): + super(BoolQuery, self).__init__(**kwargs) + + self._must = [] + self._must_not = [] + self._should = [] + self.boost = boost + self.minimum_number_should_match = minimum_number_should_match + self.disable_coord = disable_coord + + if must: + self.add_must(must) + + if must_not: + self.add_must_not(must_not) + + if should: + self.add_should(should) + + def add_must(self, queries): + """Add a query to the "must" clause of the query. + + The Query object will be returned, so calls to this can be chained. + + """ + if isinstance(queries, list): + self._must.extend(queries) + else: + self._must.append(queries) + return self + + def add_should(self, queries): + """Add a query to the "should" clause of the query. + + The Query object will be returned, so calls to this can be chained. + + """ + if isinstance(queries, list): + self._should.extend(queries) + else: + self._should.append(queries) + return self + + def add_must_not(self, queries): + """Add a query to the "must_not" clause of the query. + + The Query object will be returned, so calls to this can be chained. 
+ + """ + if isinstance(queries, list): + self._must_not.extend(queries) + else: + self._must_not.append(queries) + return self + + def is_empty(self): + if self._must: + return False + if self._must_not: + return False + if self._should: + return False + return True + + def serialize(self): + filters = {} + if self._must: + filters['must'] = [f.serialize() for f in self._must] + if self._must_not: + filters['must_not'] = [f.serialize() for f in self._must_not] + if self._should: + filters['should'] = [f.serialize() for f in self._should] + filters['minimum_number_should_match'] = self.minimum_number_should_match + if self.boost: + filters['boost'] = self.boost + if self.disable_coord is not None: + filters['disable_coord'] = self.disable_coord + if not filters: + raise RuntimeError("A least a filter must be declared") + return {"bool": filters} + + +class ConstantScoreQuery(Query): + """Returns a constant score for all documents matching a filter. + + Multiple filters may be supplied by passing a sequence or iterator as the + filter parameter. If multiple filters are supplied, documents must match + all of them to be matched by this query. + + """ + _internal_name = "constant_score" + + def __init__(self, filter=None, boost=1.0, **kwargs): + super(ConstantScoreQuery, self).__init__(**kwargs) + self.filters = [] + self.boost = boost + if filter: + self.add(filter) + + def add(self, filter): + """Add a filter, or a list of filters, to the query. + + If a sequence of filters is supplied, they are all added, and will be + combined with an ANDFilter. + + """ + from pyes.filters import Filter + + if isinstance(filter, Filter): + self.filters.append(filter) + else: + self.filters.extend(filter) + return self + + def is_empty(self): + """Returns True if the query is empty. 
+ + """ + if self.filters: + return False + return True + + def serialize(self): + data = {} + + if self.boost != 1.0: + data["boost"] = self.boost + filters = {} + if len(self.filters) == 1: + filters.update(self.filters[0].serialize()) + else: + from pyes import ANDFilter + + filters.update(ANDFilter(self.filters).serialize()) + if not filters: + raise QueryError("A filter is required") + data['filter'] = filters + return {self._internal_name: data} + + +class HasChildQuery(Query): + _internal_name = "has_child" + + def __init__(self, type, query, _scope=None, **kwargs): + super(HasChildQuery, self).__init__(**kwargs) + self.type = type + self._scope = _scope + self.query = query + + def serialize(self): + data = { + 'type': self.type, + 'query': self.query.serialize()} + if self._scope is not None: + data['_scope'] = self._scope + return {self._internal_name: data} + + +class TopChildrenQuery(ConstantScoreQuery): + _internal_name = "top_children" + + def __init__(self, type, score="max", factor=5, incremental_factor=2, + **kwargs): + super(TopChildrenQuery, self).__init__(**kwargs) + self.type = type + self.score = score + self.factor = factor + self.incremental_factor = incremental_factor + + def serialize(self): + filters = {} + + if self.boost != 1.0: + filters["boost"] = self.boost + + for f in self.filters: + filters.update(f.serialize()) + + if self.score not in ["max", "min", "avg"]: + raise InvalidParameterQuery("Invalid value '%s' for score" % self.score) + + return {self._internal_name: { + 'type': self.type, + 'query': filters, + 'score': self.score, + 'factor': self.factor, + "incremental_factor": self.incremental_factor}} + + +class NestedQuery(Query): + """ + Nested query allows to query nested objects / docs (see nested mapping). + The query is executed against the nested objects / docs as if they were + indexed as separate docs (they are, internally) and resulting in the root + parent doc (or parent nested mapping). 
+ + The query path points to the nested object path, and the query (or filter) + includes the query that will run on the nested docs matching the direct + path, and joining with the root parent docs. + + The score_mode allows to set how inner children matching affects scoring of + parent. It defaults to avg, but can be total, max and none. + + Multi level nesting is automatically supported, and detected, resulting in + an inner nested query to automatically match the relevant nesting level + (and not root) if it exists within another nested query. + """ + _internal_name = "nested" + + def __init__(self, path, query, _scope=None, score_mode="avg", **kwargs): + super(NestedQuery, self).__init__(**kwargs) + self.path = path + self.score_mode = score_mode + self.query = query + self._scope = _scope + + def serialize(self): + if self.score_mode and self.score_mode not in ['avg', "total", "max"]: + raise InvalidParameterQuery("Invalid score_mode: %s" % self.score_mode) + data = { + 'path': self.path, + 'score_mode': self.score_mode, + 'query': self.query.serialize()} + if self._scope is not None: + data['_scope'] = self._scope + return {self._internal_name: data} + + +class DisMaxQuery(Query): + _internal_name = "dis_max" + + def __init__(self, query=None, tie_breaker=0.0, boost=1.0, queries=None, **kwargs): + super(DisMaxQuery, self).__init__(**kwargs) + self.queries = queries or [] + self.tie_breaker = tie_breaker + self.boost = boost + if query: + self.add(query) + + def add(self, query): + if isinstance(query, list): + self.queries.extend(query) + else: + self.queries.append(query) + return self + + def serialize(self): + filters = {} + + if self.tie_breaker != 0.0: + filters["tie_breaker"] = self.tie_breaker + + if self.boost != 1.0: + filters["boost"] = self.boost + + filters["queries"] = [q.serialize() for q in self.queries] + if not filters["queries"]: + raise InvalidQuery("A least a query is required") + return {self._internal_name: filters} + + +class 
FieldQuery(Query): + _internal_name = "field" + + def __init__(self, fieldparameters=None, default_operator="OR", + analyzer=None, + allow_leading_wildcard=True, + lowercase_expanded_terms=True, + enable_position_increments=True, + fuzzy_prefix_length=0, + fuzzy_min_sim=0.5, + phrase_slop=0, + boost=1.0, + use_dis_max=True, + tie_breaker=0, **kwargs): + super(FieldQuery, self).__init__(**kwargs) + self.field_parameters = [] + self.default_operator = default_operator + self.analyzer = analyzer + self.allow_leading_wildcard = allow_leading_wildcard + self.lowercase_expanded_terms = lowercase_expanded_terms + self.enable_position_increments = enable_position_increments + self.fuzzy_prefix_length = fuzzy_prefix_length + self.fuzzy_min_sim = fuzzy_min_sim + self.phrase_slop = phrase_slop + self.boost = boost + self.use_dis_max = use_dis_max + self.tie_breaker = tie_breaker + if fieldparameters: + if isinstance(fieldparameters, list): + self.field_parameters.extend(fieldparameters) + else: + self.field_parameters.append(fieldparameters) + + def add(self, field, query, **kwargs): + fp = FieldParameter(field, query, **kwargs) + self.field_parameters.append(fp) + + def serialize(self): + result = {} + for f in self.field_parameters: + val, filters = f.serialize() + result[val] = filters + + return {self._internal_name: result} + + +class FilteredQuery(Query): + """ + FilteredQuery allows for results to be filtered using the various filter classes. 
+ + Example: + + t = TermFilter('name', 'john') + q = FilteredQuery(MatchAllQuery(), t) + results = conn.search(q) + + """ + _internal_name = "filtered" + + def __init__(self, query, filter, **kwargs): + super(FilteredQuery, self).__init__(**kwargs) + self.query = query + self.filter = filter + + def serialize(self): + filters = { + 'query': self.query.serialize(), + 'filter': self.filter.serialize(), + } + + return {self._internal_name: filters} + + +class MoreLikeThisFieldQuery(Query): + _internal_name = "more_like_this_field" + + def __init__(self, field, like_text, + percent_terms_to_match=0.3, + min_term_freq=2, + max_query_terms=25, + stop_words=None, + min_doc_freq=5, + max_doc_freq=None, + min_word_len=0, + max_word_len=0, + boost_terms=1, + boost=1.0, + **kwargs): + super(MoreLikeThisFieldQuery, self).__init__(**kwargs) + self.field = field + self.like_text = like_text + self.percent_terms_to_match = percent_terms_to_match + self.min_term_freq = min_term_freq + self.max_query_terms = max_query_terms + self.stop_words = stop_words or [] + self.min_doc_freq = min_doc_freq + self.max_doc_freq = max_doc_freq + self.min_word_len = min_word_len + self.max_word_len = max_word_len + self.boost_terms = boost_terms + self.boost = boost + + def serialize(self): + filters = {'like_text': self.like_text} + + if self.percent_terms_to_match != 0.3: + filters["percent_terms_to_match"] = self.percent_terms_to_match + if self.min_term_freq != 2: + filters["min_term_freq"] = self.min_term_freq + if self.max_query_terms != 25: + filters["max_query_terms"] = self.max_query_terms + if self.stop_words: + filters["stop_words"] = self.stop_words + if self.min_doc_freq != 5: + filters["min_doc_freq"] = self.min_doc_freq + if self.max_doc_freq: + filters["max_doc_freq"] = self.max_doc_freq + if self.min_word_len: + filters["min_word_len"] = self.min_word_len + if self.max_word_len: + filters["max_word_len"] = self.max_word_len + if self.boost_terms: + filters["boost_terms"] = 
self.boost_terms + + if self.boost != 1.0: + filters["boost"] = self.boost + return {self._internal_name: {self.field: filters}} + + +class FuzzyLikeThisQuery(Query): + _internal_name = "fuzzy_like_this" + + def __init__(self, fields, like_text, + ignore_tf=False, max_query_terms=25, + min_similarity=0.5, prefix_length=0, + boost=1.0, **kwargs): + super(FuzzyLikeThisQuery, self).__init__(**kwargs) + self.fields = fields + self.like_text = like_text + self.ignore_tf = ignore_tf + self.max_query_terms = max_query_terms + self.min_similarity = min_similarity + self.prefix_length = prefix_length + self.boost = boost + + def serialize(self): + filters = {'fields': self.fields, + 'like_text': self.like_text} + + if self.ignore_tf: + filters["ignore_tf"] = self.ignore_tf + if self.max_query_terms != 25: + filters["max_query_terms"] = self.max_query_terms + if self.min_similarity != 0.5: + filters["min_similarity"] = self.min_similarity + if self.prefix_length: + filters["prefix_length"] = self.prefix_length + if self.boost != 1.0: + filters["boost"] = self.boost + return {self._internal_name: filters} + + +class FuzzyQuery(Query): + """ + A fuzzy based query that uses similarity based on Levenshtein (edit distance) algorithm. + + Note + Warning: this query is not very scalable with its default prefix length of 0 - in this case, every term will be enumerated and cause an edit score calculation. 
Here is a simple example: + + """ + _internal_name = "fuzzy" + + def __init__(self, field, value, boost=None, + min_similarity=0.5, prefix_length=0, + **kwargs): + super(FuzzyQuery, self).__init__(**kwargs) + self.field = field + self.value = value + self.boost = boost + self.min_similarity = min_similarity + self.prefix_length = prefix_length + + def serialize(self): + data = { + 'value': self.value, + 'min_similarity': self.min_similarity, + 'prefix_length': self.prefix_length, + } + if self.boost: + data['boost'] = self.boost + return {self._internal_name: {self.field: data}} + + +class FuzzyLikeThisFieldQuery(Query): + _internal_name = "fuzzy_like_this_field" + + def __init__(self, field, like_text, + ignore_tf=False, max_query_terms=25, + boost=1.0, min_similarity=0.5, **kwargs): + super(FuzzyLikeThisFieldQuery, self).__init__(**kwargs) + self.field = field + self.like_text = like_text + self.ignore_tf = ignore_tf + self.max_query_terms = max_query_terms + self.min_similarity = min_similarity + self.boost = boost + + def serialize(self): + filters = {'like_text': self.like_text} + + if self.ignore_tf: + filters["ignore_tf"] = self.ignore_tf + if self.max_query_terms != 25: + filters["max_query_terms"] = self.max_query_terms + if self.boost != 1.0: + filters["boost"] = self.boost + if self.min_similarity != 0.5: + filters["min_similarity"] = self.min_similarity + return {self._internal_name: {self.field: filters}} + + +class MatchAllQuery(Query): + """ + Query used to match all + + Example: + + q = MatchAllQuery() + results = conn.search(q) + + """ + _internal_name = "match_all" + + def __init__(self, boost=None, **kwargs): + super(MatchAllQuery, self).__init__(**kwargs) + self.boost = boost + + def serialize(self): + filters = {} + if self.boost: + if isinstance(self.boost, (float, int)): + filters['boost'] = self.boost + else: + filters['boost'] = float(self.boost) + return {self._internal_name: filters} + + +class MoreLikeThisQuery(Query): + _internal_name = 
"more_like_this" + + def __init__(self, fields, like_text, + percent_terms_to_match=0.3, + min_term_freq=2, + max_query_terms=25, + stop_words=None, + min_doc_freq=5, + max_doc_freq=None, + min_word_len=0, + max_word_len=0, + boost_terms=1, + boost=1.0, **kwargs): + super(MoreLikeThisQuery, self).__init__(**kwargs) + self.fields = fields + self.like_text = like_text + self.stop_words = stop_words or [] + self.percent_terms_to_match = percent_terms_to_match + self.min_term_freq = min_term_freq + self.max_query_terms = max_query_terms + self.min_doc_freq = min_doc_freq + self.max_doc_freq = max_doc_freq + self.min_word_len = min_word_len + self.max_word_len = max_word_len + self.boost_terms = boost_terms + self.boost = boost + + def serialize(self): + filters = {'fields': self.fields, + 'like_text': self.like_text} + + if self.percent_terms_to_match != 0.3: + filters["percent_terms_to_match"] = self.percent_terms_to_match + if self.min_term_freq != 2: + filters["min_term_freq"] = self.min_term_freq + if self.max_query_terms != 25: + filters["max_query_terms"] = self.max_query_terms + if self.stop_words: + filters["stop_words"] = self.stop_words + if self.min_doc_freq != 5: + filters["min_doc_freq"] = self.min_doc_freq + if self.max_doc_freq: + filters["max_doc_freq"] = self.max_doc_freq + if self.min_word_len: + filters["min_word_len"] = self.min_word_len + if self.max_word_len: + filters["max_word_len"] = self.max_word_len + if self.boost_terms: + filters["boost_terms"] = self.boost_terms + + if self.boost != 1.0: + filters["boost"] = self.boost + return {self._internal_name: filters} + + +class FilterQuery(Query): + _internal_name = "query" + + def __init__(self, filters=None, **kwargs): + super(FilterQuery, self).__init__(**kwargs) + + self._filters = [] + if filters is not None: + self.add(filters) + + def add(self, filterquery): + if isinstance(filterquery, list): + self._filters.extend(filterquery) + else: + self._filters.append(filterquery) + + def 
serialize(self): + filters = [f.serialize() for f in self._filters] + if not filters: + raise RuntimeError("A least one filter must be declared") + return {self._internal_name: {"filter": filters}} + +# def __repr__(self): +# return str(self.q) + + +class PrefixQuery(Query): + def __init__(self, field=None, prefix=None, boost=None, **kwargs): + super(PrefixQuery, self).__init__(**kwargs) + self._values = {} + + if field is not None and prefix is not None: + self.add(field, prefix, boost) + + def add(self, field, prefix, boost=None): + match = {'prefix': prefix} + if boost: + if isinstance(boost, (float, int)): + match['boost'] = boost + else: + match['boost'] = float(boost) + self._values[field] = match + + def serialize(self): + if not self._values: + raise RuntimeError("A least a field/prefix pair must be added") + return {"prefix": self._values} + + +class TermQuery(Query): + """Match documents that have fields that contain a term (not analyzed). + + A boost may be supplied. + + Example: + + q = TermQuery('name', 'john') + results = conn.search(q) + + With boost: + + q = TermQuery('name', 'john', boost=0.75) + results = conn.search(q) + + """ + _internal_name = "term" + + def __init__(self, field=None, value=None, boost=None, **kwargs): + super(TermQuery, self).__init__(**kwargs) + self._values = {} + + if field is not None and value is not None: + self.add(field, value, boost) + + def add(self, field, value, boost=None): + match = {'value': value} + if boost: + if isinstance(boost, (float, int)): + match['boost'] = boost + else: + match['boost'] = float(boost) + self._values[field] = match + return + + self._values[field] = value + + def serialize(self): + if not self._values: + raise RuntimeError("A least a field/value pair must be added") + return {self._internal_name: self._values} + + +class TermsQuery(TermQuery): + _internal_name = "terms" + + def __init__(self, *args, **kwargs): + super(TermsQuery, self).__init__(*args, **kwargs) + + def add(self, field, 
value, minimum_match=1): + if not isinstance(value, list): + raise InvalidParameterQuery("value %r must be valid list" % value) + self._values[field] = value + if minimum_match: + if isinstance(minimum_match, int): + self._values['minimum_match'] = minimum_match + else: + self._values['minimum_match'] = int(minimum_match) + + +class TextQuery(Query): + """ + A new family of text queries that accept text, analyzes it, and constructs a query out of it. + + Examples: + + q = TextQuery('book_title', 'elasticsearch') + results = conn.search(q) + + q = TextQuery('book_title', 'elasticsearch python', operator='and') + results = conn.search(q) + + """ + _internal_name = "text" + _valid_types = ['boolean', "phrase", "phrase_prefix"] + _valid_operators = ['or', "and"] + + def __init__(self, field, text, type="boolean", slop=0, fuzziness=None, + prefix_length=0, max_expansions=2147483647, + operator="or", analyzer=None, **kwargs): + super(TextQuery, self).__init__(**kwargs) + self.queries = {} + self.add_query(field, text, type, slop, fuzziness, + prefix_length, max_expansions, + operator, analyzer) + + def add_query(self, field, text, type="boolean", slop=0, fuzziness=None, + prefix_length=0, max_expansions=2147483647, + operator="or", analyzer=None): + if type not in self._valid_types: + raise QueryError("Invalid value '%s' for type: allowed values are %s" % (type, self._valid_types)) + if operator not in self._valid_operators: + raise QueryError( + "Invalid value '%s' for operator: allowed values are %s" % (operator, self._valid_operators)) + + query = {'type': type, + 'query': text} + if slop: + query["slop"] = slop + if fuzziness is not None: + query["fuzziness"] = fuzziness + if prefix_length: + query["prefix_length"] = prefix_length + if max_expansions != 2147483647: + query["max_expansions"] = max_expansions + if operator: + query["operator"] = operator + + self.queries[field] = query + + def serialize(self): + return {self._internal_name: self.queries} + + +class 
RegexTermQuery(TermQuery): + _internal_name = "regex_term" + + def __init__(self, *args, **kwargs): + super(RegexTermQuery, self).__init__(*args, **kwargs) + + +class StringQuery(Query): + """ + Query to match values on all fields for a given string + + Example: + + q = StringQuery('elasticsearch') + results = conn.search(q) + + """ + _internal_name = "query_string" + + def __init__(self, query, default_field=None, + search_fields=None, + default_operator="OR", + analyzer=None, + allow_leading_wildcard=True, + lowercase_expanded_terms=True, + enable_position_increments=True, + fuzzy_prefix_length=0, + fuzzy_min_sim=0.5, + phrase_slop=0, + boost=1.0, + analyze_wildcard=False, + use_dis_max=True, + tie_breaker=0, + clean_text=False, + **kwargs): + super(StringQuery, self).__init__(**kwargs) + self.clean_text = clean_text + self.search_fields = search_fields or [] + self.query = query + self.default_field = default_field + self.default_operator = default_operator + self.analyzer = analyzer + self.allow_leading_wildcard = allow_leading_wildcard + self.lowercase_expanded_terms = lowercase_expanded_terms + self.enable_position_increments = enable_position_increments + self.fuzzy_prefix_length = fuzzy_prefix_length + self.fuzzy_min_sim = fuzzy_min_sim + self.phrase_slop = phrase_slop + self.boost = boost + self.analyze_wildcard = analyze_wildcard + self.use_dis_max = use_dis_max + self.tie_breaker = tie_breaker + + + def serialize(self): + filters = {} + if self.default_field: + filters["default_field"] = self.default_field + if not isinstance(self.default_field, (str, unicode)) and isinstance(self.default_field, list): + if not self.use_dis_max: + filters["use_dis_max"] = self.use_dis_max + if self.tie_breaker: + filters["tie_breaker"] = self.tie_breaker + + if self.default_operator != "OR": + filters["default_operator"] = self.default_operator + if self.analyzer: + filters["analyzer"] = self.analyzer + if not self.allow_leading_wildcard: + 
filters["allow_leading_wildcard"] = self.allow_leading_wildcard + if not self.lowercase_expanded_terms: + filters["lowercase_expanded_terms"] = self.lowercase_expanded_terms + if not self.enable_position_increments: + filters["enable_position_increments"] = self.enable_position_increments + if self.fuzzy_prefix_length: + filters["fuzzy_prefix_length"] = self.fuzzy_prefix_length + if self.fuzzy_min_sim != 0.5: + filters["fuzzy_min_sim"] = self.fuzzy_min_sim + if self.phrase_slop: + filters["phrase_slop"] = self.phrase_slop + if self.search_fields: + if isinstance(self.search_fields, (str, unicode)): + filters["fields"] = [self.search_fields] + else: + filters["fields"] = self.search_fields + + if len(filters["fields"]) > 1: + if not self.use_dis_max: + filters["use_dis_max"] = self.use_dis_max + if self.tie_breaker: + filters["tie_breaker"] = self.tie_breaker + if self.boost != 1.0: + filters["boost"] = self.boost + if self.analyze_wildcard: + filters["analyze_wildcard"] = self.analyze_wildcard + if self.clean_text: + query = clean_string(self.query) + if not query: + raise InvalidQuery("The query is empty") + filters["query"] = query + else: + if not self.query.strip(): + raise InvalidQuery("The query is empty") + filters["query"] = self.query + return {self._internal_name: filters} + + +class RangeQuery(Query): + def __init__(self, qrange=None, **kwargs): + super(RangeQuery, self).__init__(**kwargs) + + self.ranges = [] + if qrange: + self.add(qrange) + + def add(self, qrange): + if isinstance(qrange, list): + self.ranges.extend(qrange) + elif isinstance(qrange, ESRange): + self.ranges.append(qrange) + + def serialize(self): + if not self.ranges: + raise RuntimeError("A least a range must be declared") + filters = dict([r.serialize() for r in self.ranges]) + return {"range": filters} + + +class SpanFirstQuery(TermQuery): + _internal_name = "span_first" + + def __init__(self, field=None, value=None, end=3, **kwargs): + super(SpanFirstQuery, self).__init__(**kwargs) 
+ self._values = {} + self.end = end + if field is not None and value is not None: + self.add(field, value) + + def serialize(self): + if not self._values: + raise RuntimeError("A least a field/value pair must be added") + return {self._internal_name: {"match": {"span_first": self._values}, + "end": self.end}} + + +class SpanNearQuery(Query): + """ + Matches spans which are near one another. One can specify _slop_, + the maximum number of intervening unmatched positions, as well as + whether matches are required to be in-order. + + The clauses element is a list of one or more other span type queries and + the slop controls the maximum number of intervening unmatched positions + permitted. + """ + _internal_name = "span_near" + + def __init__(self, clauses=None, slop=1, + in_order=None, + collect_payloads=None, **kwargs): + super(SpanNearQuery, self).__init__(**kwargs) + self.clauses = clauses or [] + self.slop = slop + self.in_order = in_order + self.collect_payloads = collect_payloads + + def _validate(self): + for clause in self.clauses: + if not is_a_spanquery(clause): + raise RuntimeError("Invalid clause:%r" % clause) + + def serialize(self): + if not self.clauses or len(self.clauses) == 0: + raise RuntimeError("A least a Span*Query must be added to clauses") + data = {"slop": self.slop} + if self.in_order is not None: + data["in_order"] = self.in_order + if self.collect_payloads is not None: + data["collect_payloads"] = self.collect_payloads + + data['clauses'] = [clause.serialize() for clause in self.clauses] + + return {self._internal_name: data} + + +class SpanNotQuery(Query): + """ + Removes matches which overlap with another span query. + + The include and exclude clauses can be any span type query. The include + clause is the span query whose matches are filtered, and the exclude + clause is the span query whose matches must not overlap those returned. 
+ """ + _internal_name = "span_not" + + def __init__(self, include, exclude, **kwargs): + super(SpanNotQuery, self).__init__(**kwargs) + self.include = include + self.exclude = exclude + + def _validate(self): + if not is_a_spanquery(self.include): + raise RuntimeError("Invalid clause:%r" % self.include) + if not is_a_spanquery(self.exclude): + raise RuntimeError("Invalid clause:%r" % self.exclude) + + def serialize(self): + self._validate() + return {self._internal_name: {'include': self.include.serialize(), 'exclude': self.exclude.serialize()}} + + +def is_a_spanquery(obj): + """ + Returns if the object is a span query + """ + return isinstance(obj, (SpanTermQuery, SpanFirstQuery, SpanOrQuery)) + + +class SpanOrQuery(Query): + """ + Matches the union of its span clauses. + + The clauses element is a list of one or more other span type queries. + """ + _internal_name = "span_or" + + def __init__(self, clauses=None, **kwargs): + super(SpanOrQuery, self).__init__(**kwargs) + self.clauses = clauses or [] + + def _validate(self): + for clause in self.clauses: + if not is_a_spanquery(clause): + raise RuntimeError("Invalid clause:%r" % clause) + + def serialize(self): + if not self.clauses or len(self.clauses) == 0: + raise RuntimeError("A least a Span*Query must be added to clauses") + clauses = [clause.serialize() for clause in self.clauses] + return {self._internal_name: {"clauses": clauses}} + + +class SpanTermQuery(TermQuery): + _internal_name = "span_term" + + def __init__(self, *args, **kwargs): + super(SpanTermQuery, self).__init__(*args, **kwargs) + + +class WildcardQuery(TermQuery): + _internal_name = "wildcard" + + def __init__(self, *args, **kwargs): + super(WildcardQuery, self).__init__(*args, **kwargs) + + +class CustomScoreQuery(Query): + _internal_name = "custom_score" + + def __init__(self, query=None, script=None, params=None, lang=None, + **kwargs): + super(CustomScoreQuery, self).__init__(**kwargs) + self.query = query + self.script = script + 
self.lang = lang + if params is None: + params = {} + self.params = params + + def add_param(self, name, value): + """ + Add a parameter + """ + self.params[name] = value + + def serialize(self): + data = {} + if not self.query: + raise RuntimeError("A least a query must be declared") + data['query'] = self.query.serialize() + if not self.script: + raise RuntimeError("A script must be provided") + data['script'] = self.script + if self.params: + data['params'] = self.params + if self.lang: + data['lang'] = self.lang + return {self._internal_name: data} + +# def __repr__(self): +# return str(self.q) + + +class IdsQuery(Query): + _internal_name = "ids" + + def __init__(self, values, type=None, **kwargs): + super(IdsQuery, self).__init__(**kwargs) + self.type = type + self.values = values + + def serialize(self): + data = {} + if self.type is not None: + data['type'] = self.type + if isinstance(self.values, basestring): + data['values'] = [self.values] + else: + data['values'] = self.values + + return {self._internal_name: data} + + +class PercolatorQuery(Query): + """A percolator query is used to determine which registered + PercolatorDoc's match the document supplied. + + """ + + def __init__(self, doc, query=None, **kwargs): + """Constructor + + doc - the doc to match against, dict + query - an additional query that can be used to filter the percolated + queries used to match against. + """ + super(PercolatorQuery, self).__init__(**kwargs) + self.doc = doc + self.query = query + + def serialize(self): + """Serialize the query to a structure using the query DSL. 
+ + """ + data = {'doc': self.doc} + if hasattr(self.query, 'serialize'): + data['query'] = self.query.serialize() + return data + + def search(self, **kwargs): + """Disable this as it is not allowed in percolator queries.""" + raise NotImplementedError() + + def to_search_json(self): + """Disable this as it is not allowed in percolator queries.""" + raise NotImplementedError() + + +class CustomFiltersScoreQuery(Query): + _internal_name = "custom_filters_score" + + class ScoreMode(object): + FIRST = "first" + MIN = "min" + MAX = "max" + TOTAL = "total" + AVG = "avg" + MULTIPLY = "multiply" + + class Filter(EqualityComparableUsingAttributeDictionary): + def __init__(self, filter_, boost=None, script=None): + if (boost is None) == (script is None): + raise ValueError("Exactly one of boost and script must be specified") + + self.filter_ = filter_ + self.boost = boost + self.script = script + + def serialize(self): + data = {'filter': self.filter_.serialize()} + if self.boost is not None: + data['boost'] = self.boost + if self.script is not None: + data['script'] = self.script + return data + + def __init__(self, query, filters, score_mode=None, params=None, lang=None, **kwargs): + super(CustomFiltersScoreQuery, self).__init__(**kwargs) + self.query = query + self.filters = filters + self.score_mode = score_mode + self.params = params + self.lang = lang + + def serialize(self): + data = {'query': self.query.serialize(), + 'filters': [filter_.serialize() for filter_ in self.filters]} + if self.score_mode is not None: + data['score_mode'] = self.score_mode + if self.params is not None: + data['params'] = self.params + if self.lang is not None: + data['lang'] = self.lang + return {self._internal_name: data} diff --git a/src/archivematicaCommon/lib/externals/pyes/rivers.py b/src/archivematicaCommon/lib/externals/pyes/rivers.py new file mode 100644 index 0000000000..19f0e574c0 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/rivers.py @@ -0,0 +1,136 @@ +# -*- 
coding: utf-8 -*- +from __future__ import absolute_import + +try: + # For Python < 2.6 or people using a newer version of simplejson + import simplejson + + json = simplejson +except ImportError: + # For Python >= 2.6 + import json + +from .es import ES + +class River(object): + def __init__(self, index_name=None, index_type=None, bulk_size=100, bulk_timeout=None): + self.name = index_name + self.index_name = index_name + self.index_type = index_type + self.bulk_size = bulk_size + self.bulk_timeout = bulk_timeout + + @property + def q(self): + res = self.serialize() + index = {} + if self.name: + index['name'] = self.name + if self.index_name: + index['index'] = self.index_name + if self.index_type: + index['type'] = self.index_type + if self.bulk_size: + index['bulk_size'] = self.bulk_size + if self.bulk_timeout: + index['bulk_timeout'] = self.bulk_timeout + if index: + res['index'] = index + return res + + def __repr__(self): + return str(self.q) + + def to_json(self): + return json.dumps(self.q, cls=ES.encoder) + + def serialize(self): + raise NotImplementedError + + +class RabbitMQRiver(River): + type = "rabbitmq" + + def __init__(self, host="localhost", port=5672, user="guest", + password="guest", vhost="/", queue="es", exchange="es", + routing_key="es", **kwargs): + super(RabbitMQRiver, self).__init__(**kwargs) + self.host = host + self.port = port + self.user = user + self.password = password + self.vhost = vhost + self.queue = queue + self.exchange = exchange + self.routing_key = routing_key + + def serialize(self): + return { + "type": self.type, + self.type: { + "host": self.host, + "port": self.port, + "user": self.user, + "pass": self.password, + "vhost": self.vhost, + "queue": self.queue, + "exchange": self.exchange, + "routing_key": self.routing_key + } + } + + +class TwitterRiver(River): + type = "twitter" + + def __init__(self, user, password, **kwargs): + super(TwitterRiver, self).__init__(**kwargs) + self.user = user + self.password = password + + 
+ def serialize(self): + return { + "type": self.type, + self.type: { + "user": self.user, + "password": self.password, + } + } + + +class CouchDBRiver(River): + type = "couchdb" + + def __init__(self, host="localhost", port=5984, db="mydb", filter=None, + filter_params=None, script=None, user=None, password=None, + **kwargs): + super(CouchDBRiver, self).__init__(**kwargs) + self.host = host + self.port = port + self.db = db + self.filter = filter + self.filter_params = filter_params + self.script = script + self.user = user + self.password = password + + def serialize(self): + result = { + "type": self.type, + self.type: { + "host": self.host, + "port": self.port, + "db": self.db, + "filter": self.filter, + } + } + if self.filter_params is not None: + result[self.type]["filter_params"] = self.filter_params + if self.script is not None: + result[self.type]["script"] = self.script + if self.user is not None: + result[self.type]["user"] = self.user + if self.password is not None: + result[self.type]["password"] = self.password + return result diff --git a/src/archivematicaCommon/lib/externals/pyes/scriptfields.py b/src/archivematicaCommon/lib/externals/pyes/scriptfields.py new file mode 100644 index 0000000000..e3fc76ed29 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/scriptfields.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import + +from .exceptions import ScriptFieldsError + +class ScriptFields(object): + """ + This object create the script_fields definition + """ + _internal_name = "script_fields" + + def __init__(self, field_name, script, params=None): + self.fields = {} + self.add_field(field_name, script, params or {}) + + def add_field(self, field_name, script, params=None): + """ + Add a field to script_fields + """ + data = {} + if script: + data['script'] = script + else: + raise ScriptFieldsError("Script is required for script_fields definition") + if params: + if isinstance(params, dict): + if len(params): 
+ data['params'] = params + else: + raise ScriptFieldsError("Parameters should be a valid dictionary") + + self.fields[field_name] = data + + def add_parameter(self, field_name, param_name, param_value): + """ + Add a parameter to a field into script_fields + + The ScriptFields object will be returned, so calls to this can be chained. + """ + try: + self.fields[field_name]['params'][param_name] = param_value + except Exception as ex: + raise ScriptFieldsError("Error adding parameter %s with value %s :%s" % (param_name, param_value, ex)) + + return self + + def serialize(self): + return self.fields diff --git a/src/archivematicaCommon/lib/externals/pyes/tests/__init__.py b/src/archivematicaCommon/lib/externals/pyes/tests/__init__.py new file mode 100644 index 0000000000..40a96afc6f --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/tests/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/src/archivematicaCommon/lib/externals/pyes/tests/data/map.json b/src/archivematicaCommon/lib/externals/pyes/tests/data/map.json new file mode 100644 index 0000000000..e02f9fe796 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/tests/data/map.json @@ -0,0 +1,411 @@ +{ + "testindex": { + "mydoctype": { + "_all": { + "enabled": true, + "store": "yes", + "term_vector": "no" + }, + "_boost": { + "name": "_boost" + }, + "_id": { + "store": "yes" + }, + "_index": { + "enabled": false, + "store": "no" + }, + "_source": { + "enabled": true, + "name": "_source" + }, + "_type": { + "store": "yes" + }, + "date_formats": [ + "dateOptionalTime", + "yyyy/MM/dd HH:mm:ss||yyyy/MM/dd" + ], + "dynamic": true, + "enabled": true, + "path": "full", + "properties": { + "firm": { + "dynamic": true, + "enabled": true, + "path": "full", + "properties": { + "id": { + "boost": 1.0, + "index": "not_analyzed", + "index_name": "id", + "omit_norms": true, + "omit_term_freq_and_positions": true, + "precision_step": 4, + "store": "yes", + "term_vector": "no", + "type": "long" + 
}, + "name_of_firm": { + "fields": { + "name_of_firm": { + "boost": 1.0, + "index": "analyzed", + "index_name": "name_of_firm", + "omit_norms": false, + "omit_term_freq_and_positions": false, + "store": "yes", + "term_vector": "with_positions_offsets", + "type": "string" + }, + "untouched": { + "boost": 1.0, + "include_in_all": false, + "index": "not_analyzed", + "index_name": "untouched", + "omit_norms": false, + "omit_term_freq_and_positions": false, + "store": "yes", + "term_vector": "no", + "type": "string" + } + }, + "path": "full", + "type": "multi_field" + } + }, + "type": "object" + }, + "code": { + "dynamic": true, + "enabled": true, + "path": "full", + "properties": { + "short_description": { + "boost": 1.0, + "index": "analyzed", + "index_name": "short_description", + "omit_norms": false, + "omit_term_freq_and_positions": false, + "store": "no", + "term_vector": "no", + "type": "string" + }, + "description": { + "boost": 1.0, + "index": "analyzed", + "index_name": "description", + "omit_norms": false, + "omit_term_freq_and_positions": false, + "store": "yes", + "term_vector": "with_positions_offsets", + "type": "string" + }, + "id": { + "boost": 1.0, + "index": "not_analyzed", + "index_name": "id", + "omit_norms": false, + "omit_term_freq_and_positions": false, + "store": "yes", + "term_vector": "no", + "type": "string" + }, + "id_typeric": { + "boost": 1.0, + "index": "analyzed", + "index_name": "id_typeric", + "omit_norms": false, + "omit_term_freq_and_positions": false, + "store": "no", + "term_vector": "no", + "type": "string" + }, + "leaf": { + "boost": 1.0, + "index": "analyzed", + "index_name": "leaf", + "omit_norms": false, + "omit_term_freq_and_positions": false, + "store": "no", + "term_vector": "no", + "type": "string" + }, + "level": { + "boost": 1.0, + "index": "not_analyzed", + "index_name": "level", + "omit_norms": true, + "omit_term_freq_and_positions": true, + "precision_step": 4, + "store": "no", + "term_vector": "no", + "type": "long" 
+ } + }, + "type": "object" + }, + "codsan": { + "boost": 1.0, + "index": "not_analyzed", + "index_name": "codsan", + "omit_norms": false, + "omit_term_freq_and_positions": false, + "store": "yes", + "term_vector": "no", + "type": "string" + }, + "tags": { + "fields": { + "tags": { + "boost": 1.0, + "index": "analyzed", + "index_name": "tags", + "omit_norms": false, + "omit_term_freq_and_positions": false, + "store": "yes", + "term_vector": "with_positions_offsets", + "type": "string" + }, + "untouched": { + "boost": 1.0, + "include_in_all": false, + "index": "not_analyzed", + "index_name": "untouched", + "omit_norms": false, + "omit_term_freq_and_positions": false, + "store": "yes", + "term_vector": "no", + "type": "string" + } + }, + "path": "full", + "type": "multi_field" + }, + "product": { + "dynamic": true, + "enabled": true, + "path": "full", + "properties": { + "id": { + "boost": 1.0, + "index": "not_analyzed", + "index_name": "id", + "omit_norms": true, + "omit_term_freq_and_positions": true, + "precision_step": 4, + "store": "yes", + "term_vector": "no", + "type": "long" + }, + "name_of_base": { + "boost": 1.0, + "index": "not_analyzed", + "index_name": "name_of_base", + "omit_norms": false, + "omit_term_freq_and_positions": false, + "store": "yes", + "term_vector": "no", + "type": "string" + }, + "name_of_product": { + "fields": { + "name_of_product": { + "boost": 1.0, + "index": "analyzed", + "index_name": "name_of_product", + "omit_norms": false, + "omit_term_freq_and_positions": false, + "store": "yes", + "term_vector": "with_positions_offsets", + "type": "string" + }, + "untouched": { + "boost": 1.0, + "include_in_all": false, + "index": "not_analyzed", + "index_name": "untouched", + "omit_norms": false, + "omit_term_freq_and_positions": false, + "store": "yes", + "term_vector": "no", + "type": "string" + } + }, + "path": "full", + "type": "multi_field" + } + }, + "type": "object" + }, + "flag_mod": { + "boost": 1.0, + "index": "not_analyzed", + 
"index_name": "flag_mod", + "omit_norms": true, + "omit_term_freq_and_positions": false, + "store": "yes", + "term_vector": "no", + "type": "boolean" + }, + "stock": { + "boost": 1.0, + "index": "not_analyzed", + "index_name": "stock", + "omit_norms": true, + "omit_term_freq_and_positions": false, + "store": "yes", + "term_vector": "no", + "type": "boolean" + }, + "id": { + "boost": 1.0, + "index": "not_analyzed", + "index_name": "id", + "omit_norms": true, + "omit_term_freq_and_positions": true, + "precision_step": 4, + "store": "yes", + "term_vector": "no", + "type": "long" + }, + "note": { + "dynamic": true, + "enabled": true, + "path": "full", + "properties": { + "filename": { + "boost": 1.0, + "index": "not_analyzed", + "index_name": "filename", + "omit_norms": false, + "omit_term_freq_and_positions": false, + "store": "no", + "term_vector": "no", + "type": "string" + }, + "numero": { + "boost": 1.0, + "index": "not_analyzed", + "index_name": "numero", + "omit_norms": false, + "omit_term_freq_and_positions": false, + "store": "yes", + "term_vector": "no", + "type": "string" + }, + "title": { + "fields": { + "title": { + "boost": 1.0, + "index": "analyzed", + "index_name": "title", + "omit_norms": false, + "omit_term_freq_and_positions": false, + "store": "yes", + "term_vector": "with_positions_offsets", + "type": "string" + }, + "untouched": { + "boost": 1.0, + "include_in_all": false, + "index": "not_analyzed", + "index_name": "untouched", + "omit_norms": false, + "omit_term_freq_and_positions": false, + "store": "yes", + "term_vector": "no", + "type": "string" + } + }, + "path": "full", + "type": "multi_field" + } + }, + "type": "object" + }, + "price": { + "boost": 1.0, + "index": "not_analyzed", + "index_name": "price", + "omit_norms": true, + "omit_term_freq_and_positions": true, + "precision_step": 4, + "store": "yes", + "term_vector": "no", + "type": "double" + }, + "folder": { + "dynamic": true, + "enabled": true, + "path": "full", + "properties": { + 
"id": { + "boost": 1.0, + "index": "not_analyzed", + "index_name": "id", + "omit_norms": true, + "omit_term_freq_and_positions": true, + "precision_step": 4, + "store": "yes", + "term_vector": "no", + "type": "long" + }, + "title": { + "boost": 1.0, + "index": "analyzed", + "index_name": "title", + "omit_norms": false, + "omit_term_freq_and_positions": false, + "store": "yes", + "term_vector": "with_positions_offsets", + "type": "string" + } + }, + "type": "object" + }, + "folder_sub": { + "dynamic": true, + "enabled": true, + "path": "full", + "properties": { + "id": { + "boost": 1.0, + "index": "not_analyzed", + "index_name": "id", + "omit_norms": true, + "omit_term_freq_and_positions": true, + "precision_step": 4, + "store": "yes", + "term_vector": "no", + "type": "long" + }, + "title": { + "boost": 1.0, + "index": "analyzed", + "index_name": "title", + "omit_norms": false, + "omit_term_freq_and_positions": false, + "store": "yes", + "term_vector": "with_positions_offsets", + "type": "string" + } + }, + "type": "object" + }, + "type_folder": { + "boost": 1.0, + "index": "not_analyzed", + "index_name": "type_folder", + "omit_norms": false, + "omit_term_freq_and_positions": false, + "store": "yes", + "term_vector": "no", + "type": "string" + } + }, + "type": "object" + } + } +} diff --git a/src/archivematicaCommon/lib/externals/pyes/tests/data/testXHTML.html b/src/archivematicaCommon/lib/externals/pyes/tests/data/testXHTML.html new file mode 100644 index 0000000000..bdccf83ba5 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/tests/data/testXHTML.html @@ -0,0 +1,29 @@ +<!-- + Licensed to the Apache Software Foundation (ASF) under one or more + contributor license agreements. See the NOTICE file distributed with + this work for additional information regarding copyright ownership. + The ASF licenses this file to You under the Apache License, Version 2.0 + (the "License"); you may not use this file except in compliance with + the License. 
You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +--> +<html xmlns="http://www.w3.org/1999/xhtml"> +<head> + <title>XHTML test document + + + + +

+ This document tests the ability of Apache Tika to extract content + from an XHTML document. +

+ + \ No newline at end of file diff --git a/src/archivematicaCommon/lib/externals/pyes/tests/estestcase.py b/src/archivematicaCommon/lib/externals/pyes/tests/estestcase.py new file mode 100644 index 0000000000..01dee6bb05 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/tests/estestcase.py @@ -0,0 +1,154 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import +import os + +""" +Unit tests for pyes. These require an es server with thrift plugin running on the default port (localhost:9500). +""" +import unittest +from pprint import pprint +from ..es import ES +from ..helpers import SettingsBuilder + +def get_conn(*args, **kwargs): + return ES(("http", "127.0.0.1", 9200), *args, **kwargs) + + +class ESTestCase(unittest.TestCase): + def setUp(self): + self.conn = get_conn(timeout=300.0)#incremented timeout for debugging + self.index_name = "test-index" + self.document_type = "test-type" + self.conn.delete_index_if_exists(self.index_name) + + def tearDown(self): + self.conn.delete_index_if_exists(self.index_name) + + def assertResultContains(self, result, expected): + for (key, value) in expected.items(): + found = False + try: + found = value == result[key] + except KeyError: + if result.has_key('meta'): + found = value == result['meta'][key] + self.assertEquals(True, found) + + #self.assertEquals(value, result[key]) + + def checkRaises(self, excClass, callableObj, *args, **kwargs): + """Assert that calling callableObj with *args and **kwargs raises an + exception of type excClass, and return the exception object so that + further tests on it can be performed. 
+ """ + try: + callableObj(*args, **kwargs) + except excClass, e: + return e + else: + raise self.failureException,\ + "Expected exception %s not raised" % excClass + + def get_datafile(self, filename): + """ + Returns a the content of a test file + """ + return open(os.path.join(os.path.dirname(__file__), "data", filename), "rb").read() + + def get_datafile_path(self, filename): + """ + Returns a the content of a test file + """ + return os.path.join(os.path.dirname(__file__), "data", filename) + + def dump(self, result): + """ + dump to stdout the result + """ + pprint(result) + + def init_default_index(self): + settings = SettingsBuilder() + settings.add_mapping({self.document_type: {'properties': + {u'parsedtext': {'boost': 1.0, + 'index': 'analyzed', + 'store': 'yes', + 'type': u'string', + "term_vector": "with_positions_offsets"}, + u'name': {'boost': 1.0, + 'index': 'analyzed', + 'store': 'yes', + 'type': u'string', + "term_vector": "with_positions_offsets"}, + u'title': {'boost': 1.0, + 'index': 'analyzed', + 'store': 'yes', + 'type': u'string', + "term_vector": "with_positions_offsets"}, + u'pos': {'store': 'yes', + 'type': u'integer'}, + u'uuid': {'boost': 1.0, + 'index': 'not_analyzed', + 'store': 'yes', + 'type': u'string'}} + }}, name=self.document_type) + + self.conn.create_index(self.index_name, settings) + + +def setUp(): + """Package level setup. + + For tests which don't modify the index, we don't want to have the overhead + of setting up a test index, so we just set up test-pindex once, and use it + for all tests. 
+ + """ + mapping = { + u'parsedtext': { + 'boost': 1.0, + 'index': 'analyzed', + 'store': 'yes', + 'type': u'string', + "term_vector": "with_positions_offsets"}, + u'name': { + 'boost': 1.0, + 'index': 'analyzed', + 'store': 'yes', + 'type': u'string', + "term_vector": "with_positions_offsets"}, + u'title': { + 'boost': 1.0, + 'index': 'analyzed', + 'store': 'yes', + 'type': u'string', + "term_vector": "with_positions_offsets"}, + u'pos': { + 'store': 'yes', + 'type': u'integer'}, + u'doubles': { + 'store': 'yes', + 'type': u'double'}, + u'uuid': { + 'boost': 1.0, + 'index': 'not_analyzed', + 'store': 'yes', + 'type': u'string'}} + + conn = get_conn() + conn.delete_index_if_exists("test-pindex") + conn.create_index("test-pindex") + conn.put_mapping("test-type", {'properties': mapping}, ["test-pindex"]) + conn.index({"name": "Joe Tester", "parsedtext": "Joe Testere nice guy", "uuid": "11111", "position": 1, + "doubles": [1.0, 2.0, 3.0]}, "test-pindex", "test-type", 1) + conn.index({"name": "Bill Baloney", "parsedtext": "Joe Testere nice guy", "uuid": "22222", "position": 2, + "doubles": [0.1, 0.2, 0.3]}, "test-pindex", "test-type", 2) + conn.refresh(["test-pindex"]) + + +def tearDown(): + """Remove the package level index. + + """ + conn = get_conn() + conn.delete_index_if_exists("test-pindex") diff --git a/src/archivematicaCommon/lib/externals/pyes/tests/test_aliases.py b/src/archivematicaCommon/lib/externals/pyes/tests/test_aliases.py new file mode 100644 index 0000000000..fb1323dd6e --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/tests/test_aliases.py @@ -0,0 +1,108 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import +from .estestcase import ESTestCase +import unittest +from .. 
import exceptions + +class ErrorReportingTestCase(ESTestCase): + def setUp(self): + super(ErrorReportingTestCase, self).setUp() + #self.conn.set_alias('test-alias', ['_river']) + #self.conn.delete_alias('test-alias', ['_river']) + self.conn.delete_index_if_exists('test-index2') + + def tearDown(self): + #self.conn.set_alias('test-alias', ['_river']) + #self.conn.delete_alias('test-alias', ['_river']) + self.conn.delete_index_if_exists('test-index2') + + def testCreateDeleteAliases(self): + """Test errors thrown when creating or deleting aliases. + + """ + self.assertTrue('ok' in self.conn.create_index(self.index_name)) + + # Check initial output of get_indices. + result = self.conn.get_indices(include_aliases=True) + self.assertTrue('test-index' in result) + self.assertEqual(result['test-index'], {'num_docs': 0}) + self.assertTrue('test-alias' not in result) + + # Check getting a missing alias. + err = self.checkRaises(exceptions.IndexMissingException, + self.conn.get_alias, 'test-alias') + self.assertEqual(str(err), '[test-alias] missing') + + # Check deleting a missing alias (doesn't return a error). + self.conn.delete_alias("test-alias", self.index_name) + + # Add an alias from test-alias to test-index + self.conn.change_aliases([['add', 'test-index', 'test-alias']]) + self.assertEqual(self.conn.get_alias("test-alias"), ['test-index']) + + # Adding an alias to a missing index fails + err = self.checkRaises(exceptions.IndexMissingException, + self.conn.change_aliases, + [['add', 'test-missing-index', 'test-alias']]) + self.assertEqual(str(err), '[test-missing-index] missing') + self.assertEqual(self.conn.get_alias("test-alias"), ['test-index']) + + # # An alias can't be deleted using delete_index. + # err = self.checkRaises(exceptions.NotFoundException, + # self.conn.delete_index, 'test-alias') + # self.assertEqual(str(err), '[test-alias] missing') + + # Check return value from get_indices now. 
+ result = self.conn.get_indices(include_aliases=True) + self.assertTrue('test-index' in result) + self.assertEqual(result['test-index'], {'num_docs': 0}) + self.assertTrue('test-alias' in result) + self.assertEqual(result['test-alias'], {'alias_for': ['test-index'], 'num_docs': 0}) + + result = self.conn.get_indices(include_aliases=False) + self.assertTrue('test-index' in result) + self.assertEqual(result['test-index'], {'num_docs': 0}) + self.assertTrue('test-alias' not in result) + + # Add an alias to test-index2. + self.assertTrue('ok' in self.conn.create_index("test-index2")) + self.conn.change_aliases([['add', 'test-index2', 'test-alias']]) + self.assertEqual(sorted(self.conn.get_alias("test-alias")), + ['test-index', 'test-index2']) + + # Check deleting multiple indices from an alias. + self.conn.delete_alias("test-alias", [self.index_name, "test-index2"]) + self.checkRaises(exceptions.IndexMissingException, self.conn.get_alias, 'test-alias') + + # Check deleting multiple indices from a missing alias (still no error) + self.conn.delete_alias("test-alias", [self.index_name, "test-index2"]) + + # Check that we still get an error for a missing alias. + err = self.checkRaises(exceptions.IndexMissingException, + self.conn.get_alias, 'test-alias') + self.assertEqual(str(err), '[test-alias] missing') + + def testWriteToAlias(self): + self.assertTrue('ok' in self.conn.create_index(self.index_name)) + self.assertTrue('ok' in self.conn.create_index("test-index2")) + self.assertTrue('ok' in self.conn.set_alias("test-alias", ['test-index'])) + self.assertTrue('ok' in self.conn.set_alias("test-alias2", ['test-index', 'test-index2'])) + + # Can write to aliases only if they point to exactly one index. 
+ self.conn.index(dict(title='doc1'), 'test-index', 'testtype') + self.conn.index(dict(title='doc1'), 'test-index2', 'testtype') + self.conn.index(dict(title='doc1'), 'test-alias', 'testtype') + self.checkRaises(exceptions.ElasticSearchIllegalArgumentException, + self.conn.index, dict(title='doc1'), + 'test-alias2', 'testtype') + + self.conn.refresh() # ensure that the documents have been indexed. + # Check the document counts for each index or alias. + result = self.conn.get_indices(include_aliases=True) + self.assertEqual(result['test-index'], {'num_docs': 2}) + self.assertEqual(result['test-index2'], {'num_docs': 1}) + self.assertEqual(result['test-alias'], {'alias_for': ['test-index'], 'num_docs': 2}) + self.assertEqual(result['test-alias2'], {'alias_for': ['test-index', 'test-index2'], 'num_docs': 3}) + +if __name__ == "__main__": + unittest.main() diff --git a/src/archivematicaCommon/lib/externals/pyes/tests/test_attachments.py b/src/archivematicaCommon/lib/externals/pyes/tests/test_attachments.py new file mode 100644 index 0000000000..9a2a5e206f --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/tests/test_attachments.py @@ -0,0 +1,89 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import +import unittest +from .estestcase import ESTestCase +from ..query import TermQuery +from ..es import file_to_attachment +import os + +class TestFileSaveTestCase(ESTestCase): + def test_filesave(self): + mapping = { + "my_attachment": {"type": "attachment", + 'fields': { + "file": {'store': "yes"}, + "date": {'store': "yes"}, + "author": {'store': "yes"}, + "title": {'store': "yes"}, } + } + } + self.conn.create_index(self.index_name) + self.conn.put_mapping(self.document_type, {self.document_type: {'properties': mapping}}, self.index_name) + self.conn.refresh(self.index_name) + self.conn.get_mapping(self.document_type, self.index_name) + name = "map.json" + content = self.get_datafile(name) + self.conn.put_file(self.get_datafile_path(name), 
self.index_name, self.document_type, 1, name=name) + self.conn.refresh(self.index_name) + _ = self.conn.get_mapping(self.document_type, self.index_name) + nname, ncontent = self.conn.get_file(self.index_name, self.document_type, 1) + self.assertEquals(name, nname) + self.assertEquals(content, ncontent) + + +class QueryAttachmentTestCase(ESTestCase): + def setUp(self): + super(QueryAttachmentTestCase, self).setUp() + mapping = { + "attachment": {"type": "attachment", + 'fields': { + "file": {'store': "yes"}, + "date": {'store': "yes"}, + "author": {'store': "yes"}, + "title": {'store': "yes", "term_vector": "with_positions_offsets"}, + "attachment": {'store': "yes"}, + } + }, + 'uuid': {'boost': 1.0, + 'index': 'not_analyzed', + 'store': 'yes', + 'type': u'string'} + } + # mapping = { + # self.document_type: { + # "_index": {"enabled": "yes"}, + # "_id": {"store": "yes"}, + # "properties": { + # "attachment": { + # "type": "attachment", + # "fields": { + # "title": {"store": "yes", "term_vector" : "with_positions_offsets"}, + # "attachment": {"store":"yes", "term_vector" : "with_positions_offsets"} + # }, + # "store":"yes" + # + # }, + # "uuid": {"type": "string", "store": "yes", "index": "not_analyzed"} + # }, + # "_all": {"store": "yes", "term_vector": "with_positions_offsets"} + # } + # } + self.conn.debug_dump = True + self.conn.create_index(self.index_name) + self.conn.put_mapping(self.document_type, {self.document_type: {'properties': mapping}}, self.index_name) + self.conn.refresh(self.index_name) + self.conn.get_mapping(self.document_type, self.index_name) + self.conn.index({"attachment": file_to_attachment(self.get_datafile_path("testXHTML.html")), "uuid": "1"} + , self.index_name, self.document_type, 1) + self.conn.refresh(self.index_name) + + def test_TermQuery(self): + q = TermQuery("uuid", "1").search( + fields=['attachment', 'attachment.author', 'attachment.title', 'attachment.date']) + # q = TermQuery("uuid", "1", fields=['*']) + resultset = 
self.conn.search(query=q, indices=self.index_name) + self.assertEquals(resultset.total, 1) + self.assertEquals(resultset.hits[0]['fields']['attachment.author'], u'Tika Developers') + +if __name__ == "__main__": + unittest.main() diff --git a/src/archivematicaCommon/lib/externals/pyes/tests/test_bulk.py b/src/archivematicaCommon/lib/externals/pyes/tests/test_bulk.py new file mode 100644 index 0000000000..82a37ceb9d --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/tests/test_bulk.py @@ -0,0 +1,241 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import +from .estestcase import ESTestCase +from ..query import TermQuery +from ..es import _raise_exception_if_bulk_item_failed, _is_bulk_item_ok +from ..exceptions import BulkOperationException + +class BulkTestCase(ESTestCase): + def setUp(self): + super(BulkTestCase, self).setUp() + mapping = {u'parsedtext': {'boost': 1.0, + 'index': 'analyzed', + 'store': 'yes', + 'type': u'string', + "term_vector": "with_positions_offsets"}, + u'name': {'boost': 1.0, + 'index': 'analyzed', + 'store': 'yes', + 'type': u'string', + "term_vector": "with_positions_offsets"}, + u'title': {'boost': 1.0, + 'index': 'analyzed', + 'store': 'yes', + 'type': u'string', + "term_vector": "with_positions_offsets"}, + u'pos': {'store': 'yes', + 'type': u'integer'}, + u'uuid': {'boost': 1.0, + 'index': 'not_analyzed', + 'store': 'yes', + 'type': u'string'}} + self.conn.create_index(self.index_name) + self.conn.put_mapping(self.document_type, {'properties': mapping}, self.index_name) + + def test_force(self): + self.conn.raise_on_bulk_item_failure = False + self.conn.index({"name": "Joe Tester", "parsedtext": "Joe Testere nice guy", "uuid": "11111", "position": 1}, + self.index_name, self.document_type, 1, bulk=True) + self.conn.index({"name": "Bill Baloney", "parsedtext": "Bill Testere nice guy", "uuid": "22222", "position": 2}, + self.index_name, self.document_type, 2, bulk=True) + self.conn.index({"name": "Bill Clinton", 
"parsedtext": """Bill is not + nice guy""", "uuid": "33333", "position": 3}, self.index_name, self.document_type, 3, bulk=True) + bulk_result = self.conn.force_bulk() + self.assertEquals(len(bulk_result['items']), 3) + self.conn.refresh(self.index_name) + q = TermQuery("name", "bill") + resultset = self.conn.search(query=q, indices=self.index_name) + self.assertEquals(resultset.total, 2) + + def test_automatic_flush(self): + self.conn.force_bulk() + self.conn.bulk_size = 3 + self.conn.raise_on_bulk_item_failure = False + + self.assertIsNone( + self.conn.index({"name": "Joe Tester", "parsedtext": "Joe Testere nice guy", "uuid": "11111", "position": 1} + , + self.index_name, self.document_type, 4, bulk=True)) + self.assertIsNone(self.conn.flush_bulk(False)) + self.assertEqual(len(self.conn.bulker.bulk_data), 1) + + self.assertIsNone( + self.conn.index( + {"name": "Bill Baloney", "parsedtext": "Bill Testere nice guy", "uuid": "22222", "position": 2}, + self.index_name, self.document_type, 5, bulk=True)) + self.assertIsNone(self.conn.flush_bulk(False)) + self.assertEqual(len(self.conn.bulker.bulk_data), 2) + + bulk_result = self.conn.index( + {"name": "Bill Clinton", "parsedtext": """Bill is not nice guy""", "uuid": "33333", "position": 3}, + self.index_name, self.document_type, 6, bulk=True) + self.assertEquals(len(bulk_result['items']), 3) + self.assertEqual(self.conn.bulker.bulk_data, []) + + self.conn.bulk_size = 3 + + self.assertIsNone(self.conn.delete(self.index_name, self.document_type, 4, True)) + self.assertIsNone(self.conn.flush_bulk(False)) + self.assertEqual(len(self.conn.bulker.bulk_data), 1) + + self.assertIsNone(self.conn.delete(self.index_name, self.document_type, 5, True)) + self.assertIsNone(self.conn.flush_bulk(False)) + self.assertEqual(len(self.conn.bulker.bulk_data), 2) + + bulk_result = self.conn.delete(self.index_name, self.document_type, 6, True) + self.assertIsNone(self.conn.flush_bulk(False)) + self.assertEquals(len(bulk_result['items']), 3) 
+ self.assertEqual(self.conn.bulker.bulk_data, []) + + self.conn.refresh(self.index_name) + + def test_error(self): + self.conn.force_bulk() + self.conn.bulk_size = 2 + + self.assertIsNone( + self.conn.index( + {"name": "Bill Baloney", "parsedtext": "Bill Testere nice guy", "uuid": "22222", "position": 2}, + self.index_name, self.document_type, 7, bulk=True)) + self.assertIsNone(self.conn.flush_bulk(False)) + self.assertEqual(len(self.conn.bulker.bulk_data), 1) + + bulk_result = self.conn.index( + "invalid", self.index_name, self.document_type, 8, bulk=True) + self.assertEquals(len(bulk_result['items']), 2) + self.assertTrue(bulk_result["items"][0]["index"]["ok"]) + self.assertTrue("error" in bulk_result["items"][1]["index"]) + self.assertEqual(self.conn.bulker.bulk_data, []) + + self.conn.bulk_size = 2 + self.assertIsNone(self.conn.delete( + self.index_name, self.document_type, 9, bulk=True)) + bulk_result = self.conn.delete( + self.index_name, "#foo", 9, bulk=True) + self.assertEquals(len(bulk_result['items']), 2) + self.assertTrue(bulk_result["items"][0]["delete"]["ok"]) + self.assertTrue("error" in bulk_result["items"][1]["delete"]) + self.assertEqual(self.conn.bulker.bulk_data, []) + + def test_raise_exception_if_bulk_item_failed(self): + index_ok_1 = {'index': {'_type': 'test-type', '_id': '4', 'ok': True, '_version': 1, '_index': 'test-index'}} + self.assertTrue(_is_bulk_item_ok(index_ok_1)) + index_ok_2 = {'index': {'_type': 'test-type', '_id': '5', 'ok': True, '_version': 1, '_index': 'test-index'}} + self.assertTrue(_is_bulk_item_ok(index_ok_2)) + index_ok_3 = {'index': {'_type': 'test-type', '_id': '6', 'ok': True, '_version': 1, '_index': 'test-index'}} + self.assertTrue(_is_bulk_item_ok(index_ok_3)) + + index_error_1 = {'index': {'_type': 'test-type', '_id': '8', '_index': 'test-index', + 'error': 'ElasticSearchParseException[Failed to derive xcontent from (offset=0, length=7): [105, 110, 118, 97, 108, 105, 100]]'}} + 
self.assertFalse(_is_bulk_item_ok(index_error_1)) + index_error_2 = {'index': {'_type': 'test-type', '_id': '9', '_index': 'test-index', + 'error': 'ElasticSearchParseException[Failed to derive xcontent from (offset=0, length=7): [105, 110, 118, 97, 108, 105, 100]]'}} + self.assertFalse(_is_bulk_item_ok(index_error_2)) + + delete_ok_1 = {'delete': {'_type': 'test-type', '_id': '4', 'ok': True, '_version': 2, '_index': 'test-index'}} + self.assertTrue(_is_bulk_item_ok(delete_ok_1)) + delete_ok_2 = {'delete': {'_type': 'test-type', '_id': '5', 'ok': True, '_version': 2, '_index': 'test-index'}} + self.assertTrue(_is_bulk_item_ok(delete_ok_2)) + delete_ok_3 = {'delete': {'_type': 'test-type', '_id': '6', 'ok': True, '_version': 2, '_index': 'test-index'}} + self.assertTrue(_is_bulk_item_ok(delete_ok_3)) + delete_error_1 = {'delete': {'_type': '#foo', '_id': '9', '_index': 'test-index', + 'error': "InvalidTypeNameException[mapping type name [#foo] should not include '#' in it]"}} + self.assertFalse(_is_bulk_item_ok(delete_error_1)) + delete_error_2 = {'delete': {'_type': '#foo', '_id': '10', '_index': 'test-index', + 'error': "InvalidTypeNameException[mapping type name [#foo] should not include '#' in it]"}} + self.assertFalse(_is_bulk_item_ok(delete_error_1)) + + index_all_ok = {'items': [ + index_ok_1, + index_ok_2, + index_ok_3], + 'took': 4} + delete_all_ok = {'items': [ + delete_ok_1, + delete_ok_2, + delete_ok_3], + 'took': 0} + index_one_error = {'items': [ + index_ok_1, + index_error_1], + 'took': 156} + index_two_errors = {'items': [ + index_ok_2, + index_error_1, + index_error_2], + 'took': 156} + delete_one_error = {'items': [ + delete_ok_1, + delete_error_1], + 'took': 1} + delete_two_errors = {'items': [ + delete_ok_2, + delete_error_1, + delete_error_2], + 'took': 1} + mixed_errors = {'items': [ + delete_ok_3, + index_ok_1, + index_error_1, + delete_error_1, + delete_error_2], + 'took': 1} + oops_all_errors = {'items': [ + index_error_1, + delete_error_1, 
+ delete_error_2], + 'took': 1} + + self.assertIsNone(_raise_exception_if_bulk_item_failed(index_all_ok)) + self.assertIsNone(_raise_exception_if_bulk_item_failed(delete_all_ok)) + + with self.assertRaises(BulkOperationException) as cm: + _raise_exception_if_bulk_item_failed(index_one_error) + self.assertEquals(cm.exception, BulkOperationException( + [index_error_1], index_one_error)) + + with self.assertRaises(BulkOperationException) as cm: + _raise_exception_if_bulk_item_failed(index_two_errors) + self.assertEquals(cm.exception, BulkOperationException( + [index_error_1, index_error_2], index_two_errors)) + + with self.assertRaises(BulkOperationException) as cm: + _raise_exception_if_bulk_item_failed(delete_one_error) + self.assertEquals(cm.exception, BulkOperationException( + [delete_error_1], delete_one_error)) + + with self.assertRaises(BulkOperationException) as cm: + _raise_exception_if_bulk_item_failed(delete_two_errors) + self.assertEquals(cm.exception, BulkOperationException( + [delete_error_1, delete_error_2], delete_two_errors)) + + with self.assertRaises(BulkOperationException) as cm: + _raise_exception_if_bulk_item_failed(mixed_errors) + self.assertEquals(cm.exception, BulkOperationException( + [index_error_1, delete_error_1, delete_error_2], mixed_errors)) + + with self.assertRaises(BulkOperationException) as cm: + _raise_exception_if_bulk_item_failed(oops_all_errors) + self.assertEquals(cm.exception, BulkOperationException( + [index_error_1, delete_error_1, delete_error_2], oops_all_errors)) + + # now, try it against a real index... 
+ self.conn.force_bulk() + self.conn.raise_on_bulk_item_failure = False + self.conn.bulk_size = 1 + + bulk_result = self.conn.delete(self.index_name, "#bogus", 9, bulk=True) + self.assertFalse(_is_bulk_item_ok(bulk_result["items"][0])) + + bulk_result = self.conn.index("invalid", self.index_name, self.document_type, 8, bulk=True) + self.assertFalse(_is_bulk_item_ok(bulk_result["items"][0])) + + self.conn.raise_on_bulk_item_failure = True + + with self.assertRaises(BulkOperationException) as cm: + self.conn.delete( + self.index_name, "#bogus", 9, bulk=True) + + with self.assertRaises(BulkOperationException) as cm: + self.conn.index( + "invalid", self.index_name, self.document_type, 8, bulk=True) diff --git a/src/archivematicaCommon/lib/externals/pyes/tests/test_cluster.py b/src/archivematicaCommon/lib/externals/pyes/tests/test_cluster.py new file mode 100644 index 0000000000..f838d05314 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/tests/test_cluster.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import +from .estestcase import ESTestCase +import unittest + +class ClusterTestCase(ESTestCase): + def setUp(self): + super(ClusterTestCase, self).setUp() + mapping = {u'parsedtext': {'boost': 1.0, + 'index': 'analyzed', + 'store': 'yes', + 'type': u'string', + "term_vector": "with_positions_offsets"}, + u'name': {'boost': 1.0, + 'index': 'analyzed', + 'store': 'yes', + 'type': u'string', + "term_vector": "with_positions_offsets"}, + u'title': {'boost': 1.0, + 'index': 'analyzed', + 'store': 'yes', + 'type': u'string', + "term_vector": "with_positions_offsets"}, + u'pos': {'store': 'yes', + 'type': u'integer'}, + u'uuid': {'boost': 1.0, + 'index': 'not_analyzed', + 'store': 'yes', + 'type': u'string'}} + self.conn.create_index(self.index_name) + self.conn.put_mapping(self.document_type, {'properties': mapping}, self.index_name) + self.conn.index({"name": "Joe Tester", "parsedtext": "Joe Testere nice guy", "uuid": "11111", 
"position": 1}, + self.index_name, self.document_type, 1) + self.conn.index({"name": "Bill Baloney", "parsedtext": "Bill Testere nice guy", "uuid": "22222", "position": 2}, + self.index_name, self.document_type, 2) + self.conn.index({"name": "Bill Clinton", "parsedtext": """Bill is not + nice guy""", "uuid": "33333", "position": 3}, self.index_name, self.document_type, 3) + self.conn.refresh(self.index_name) + + def test_ClusterState(self): + result = self.conn.cluster_state() + self.assertTrue('blocks' in result) + self.assertTrue('routing_table' in result) + + def test_ClusterNodes(self): + result = self.conn.cluster_nodes() + self.assertTrue('cluster_name' in result) + self.assertTrue('nodes' in result) + + def test_ClusterHealth(self): + result = self.conn.cluster_health() + print "health" + print result + +if __name__ == "__main__": + unittest.main() diff --git a/src/archivematicaCommon/lib/externals/pyes/tests/test_convert_errors.py b/src/archivematicaCommon/lib/externals/pyes/tests/test_convert_errors.py new file mode 100644 index 0000000000..0cc9a1a904 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/tests/test_convert_errors.py @@ -0,0 +1,27 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import +import unittest +from .estestcase import ESTestCase +from ..exceptions import (NotFoundException, IndexAlreadyExistsException) +from .. 
import convert_errors + + +class RaiseIfErrorTestCase(ESTestCase): + def test_not_found_exception(self): + self.assertRaises( + NotFoundException, + convert_errors.raise_if_error, + 404, {u'_type': u'a_type', u'_id': u'1', u'_index': u'_all'}) + + def test_nested_index_already_exists_exception(self): + self.assertRaises( + IndexAlreadyExistsException, + convert_errors.raise_if_error, + 400, {u'status': 400, + u'error': (u'RemoteTransportException[[name][inet' + + u'[/127.0.0.1:9300]][indices/createIndex]]; ' + + u'nested: IndexAlreadyExistsException[' + + u'[test-index] Already exists]; ')}) + +if __name__ == '__main__': + unittest.main() diff --git a/src/archivematicaCommon/lib/externals/pyes/tests/test_dump_curl.py b/src/archivematicaCommon/lib/externals/pyes/tests/test_dump_curl.py new file mode 100644 index 0000000000..ef7347ecb1 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/tests/test_dump_curl.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import +import unittest +from .estestcase import ESTestCase, get_conn +import StringIO + +class DumpCurlTestCase(ESTestCase): + def setUp(self): + super(DumpCurlTestCase, self).setUp() + + def testDumpCurl(self): + """Test errors thrown when creating or deleting indices. 
+ + """ + dump = StringIO.StringIO() + conn = get_conn(dump_curl=dump) + result = conn.index(dict(title="Hi"), self.index_name, self.document_type) + self.assertTrue('ok' in result) + self.assertTrue('error' not in result) + dump = dump.getvalue() + self.assertTrue(""" + curl -XPOST 'http://127.0.0.1:9200/test-index/test-type?pretty=true' -d '{"title": "Hi"}' + """.strip() in dump) + +if __name__ == "__main__": + unittest.main() diff --git a/src/archivematicaCommon/lib/externals/pyes/tests/test_errors.py b/src/archivematicaCommon/lib/externals/pyes/tests/test_errors.py new file mode 100644 index 0000000000..27438b3327 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/tests/test_errors.py @@ -0,0 +1,72 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import +import unittest +from .estestcase import ESTestCase +from .. import exceptions + +class ErrorReportingTestCase(ESTestCase): + def setUp(self): + super(ErrorReportingTestCase, self).setUp() + self.conn.delete_index_if_exists(self.index_name) + + def tearDown(self): + self.conn.delete_index_if_exists(self.index_name) + + def testCreateDelete(self): + """Test errors thrown when creating or deleting indices. 
+ + """ + result = self.conn.create_index(self.index_name) + self.assertTrue('ok' in result) + self.assertTrue('error' not in result) + + err = self.checkRaises(exceptions.IndexAlreadyExistsException, + self.conn.create_index, self.index_name) + self.assertEqual(str(err), "[test-index] Already exists") + self.assertEqual(err.status, 400) + self.assertTrue('error' in err.result) + self.assertTrue('ok' not in err.result) + + result = self.conn.delete_index(self.index_name) + self.assertTrue('ok' in result) + self.assertTrue('error' not in result) + + err = self.checkRaises(exceptions.IndexMissingException, + self.conn.delete_index, self.index_name) + self.assertEqual(str(err), "[test-index] missing") + self.assertEqual(err.status, 404) + self.assertTrue('error' in err.result) + self.assertTrue('ok' not in err.result) + + def testMissingIndex(self): + """Test generation of a IndexMissingException. + + """ + err = self.checkRaises(exceptions.IndexMissingException, + self.conn.flush, self.index_name) + self.assertEqual(str(err), "[test-index] missing") + self.assertEqual(err.status, 404) + self.assertTrue('error' in err.result) + self.assertTrue('ok' not in err.result) + + def testBadRequest(self): + """Test error reported by doing a bad request. + + """ + err = self.checkRaises(exceptions.ElasticSearchException, + self.conn._send_request, 'GET', '_bad_request') + self.assertEqual(str(err), "No handler found for uri [/_bad_request] and method [GET]") + self.assertEqual(err.status, 400) + self.assertEqual(err.result, 'No handler found for uri [/_bad_request] and method [GET]') + + def testDelete(self): + """Test error reported by deleting a missing document. 
+ + """ + self.checkRaises(exceptions.NotFoundException, + self.conn.delete, self.index_name, "flibble", + "asdf") + + +if __name__ == "__main__": + unittest.main() diff --git a/src/archivematicaCommon/lib/externals/pyes/tests/test_esmodel.py b/src/archivematicaCommon/lib/externals/pyes/tests/test_esmodel.py new file mode 100644 index 0000000000..d2a07ecc35 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/tests/test_esmodel.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import +from copy import deepcopy +import unittest +from .estestcase import ESTestCase +from ..es import DotDict + +class ElasticSearchModelTestCase(ESTestCase): + def setUp(self): + super(ElasticSearchModelTestCase, self).setUp() + self.init_default_index() + + def test_ElasticSearchModel_init(self): + obj = self.conn.factory_object(self.index_name, self.document_type, {"name": "test", "val": 1}) + self.assertEqual(obj.name, "test") + obj.name = "aaa" + self.assertEqual(obj.name, "aaa") + self.assertEqual(obj.val, 1) + self.assertEqual(obj._meta.id, None) + obj._meta.id = "dasdas" + self.assertEqual(obj._meta.id, "dasdas") + self.assertEqual(sorted(obj.keys()), ["name", "val"]) + obj.save() + obj.name = "test2" + obj.save() + + reloaded = self.conn.get(self.index_name, self.document_type, obj._meta.id) + self.assertEqual(reloaded.name, "test2") + + def test_DotDict(self): + dotdict = DotDict(foo="bar") + dotdict2 = deepcopy(dotdict) + dotdict2["foo"] = "baz" + self.assertEqual(dotdict["foo"], "bar") + self.assertEqual(dotdict2["foo"], "baz") + self.assertEqual(type(dotdict2), DotDict) + + dotdict = DotDict(foo="bar", bar=DotDict(baz="qux")) + dotdict2 = deepcopy(dotdict) + dotdict2["bar"]["baz"] = "foo" + self.assertEqual(dotdict["bar"]["baz"], "qux") + self.assertEqual(dotdict2["bar"]["baz"], "foo") + self.assertEqual(type(dotdict2), DotDict) + +if __name__ == "__main__": + unittest.main() diff --git 
a/src/archivematicaCommon/lib/externals/pyes/tests/test_facets.py b/src/archivematicaCommon/lib/externals/pyes/tests/test_facets.py new file mode 100644 index 0000000000..51ae6bc5a2 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/tests/test_facets.py @@ -0,0 +1,142 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import +import unittest +from .estestcase import ESTestCase +from ..facets import DateHistogramFacet +from ..filters import TermFilter, RangeFilter +from ..query import FilteredQuery, MatchAllQuery, Search +from ..utils import ESRange +import datetime + +class FacetSearchTestCase(ESTestCase): + def setUp(self): + super(FacetSearchTestCase, self).setUp() + mapping = {u'parsedtext': {'boost': 1.0, + 'index': 'analyzed', + 'store': 'yes', + 'type': u'string', + "term_vector": "with_positions_offsets"}, + u'name': {'boost': 1.0, + 'index': 'analyzed', + 'store': 'yes', + 'type': u'string', + "term_vector": "with_positions_offsets"}, + u'title': {'boost': 1.0, + 'index': 'analyzed', + 'store': 'yes', + 'type': u'string', + "term_vector": "with_positions_offsets"}, + u'position': {'store': 'yes', + 'type': u'integer'}, + u'tag': {'store': 'yes', + 'type': u'string'}, + u'date': {'store': 'yes', + 'type': u'date'}, + u'uuid': {'boost': 1.0, + 'index': 'not_analyzed', + 'store': 'yes', + 'type': u'string'}} + self.conn.create_index(self.index_name) + self.conn.put_mapping(self.document_type, {'properties': mapping}, self.index_name) + self.conn.index({"name": "Joe Tester", + "parsedtext": "Joe Testere nice guy", + "uuid": "11111", + "position": 1, + "tag": "foo", + "date": datetime.date(2011, 5, 16)}, + self.index_name, self.document_type, 1) + self.conn.index({"name": " Bill Baloney", + "parsedtext": "Bill Testere nice guy", + "uuid": "22222", + "position": 2, + "tag": "foo", + "date": datetime.date(2011, 4, 16)}, + self.index_name, self.document_type, 2) + self.conn.index({"name": "Bill Clinton", + "parsedtext": "Bill is not nice guy", 
+ "uuid": "33333", + "position": 3, + "tag": "bar", + "date": datetime.date(2011, 4, 28)}, + self.index_name, self.document_type, 3) + self.conn.refresh(self.index_name) + + def test_terms_facet(self): + q = MatchAllQuery() + q = q.search() + q.facet.add_term_facet('tag') + resultset = self.conn.search(query=q, indices=self.index_name, doc_types=[self.document_type]) + self.assertEquals(resultset.total, 3) + self.assertEquals(resultset.facets.tag.terms, [{u'count': 2, u'term': u'foo'}, + {u'count': 1, u'term': u'bar'}]) + + q2 = MatchAllQuery() + q2 = q2.search() + q2.facet.add_term_facet('tag') + q3 = MatchAllQuery() + q3 = q3.search() + q3.facet.add_term_facet('tag') + self.assertEquals(q2, q3) + + q4 = MatchAllQuery() + q4 = q4.search() + q4.facet.add_term_facet('bag') + self.assertNotEquals(q2, q4) + + def test_terms_facet_filter(self): + q = MatchAllQuery() + q = FilteredQuery(q, TermFilter('tag', 'foo')) + q = q.search() + q.facet.add_term_facet('tag') + resultset = self.conn.search(query=q, indices=self.index_name, doc_types=[self.document_type]) + self.assertEquals(resultset.total, 2) + self.assertEquals(resultset.facets['tag']['terms'], [{u'count': 2, u'term': u'foo'}]) + self.assertEquals(resultset.facets.tag.terms, [{u'count': 2, u'term': u'foo'}]) + + q2 = MatchAllQuery() + q2 = FilteredQuery(q2, TermFilter('tag', 'foo')) + q2 = q2.search() + q2.facet.add_term_facet('tag') + q3 = MatchAllQuery() + q3 = FilteredQuery(q3, TermFilter('tag', 'foo')) + q3 = q3.search() + q3.facet.add_term_facet('tag') + self.assertEquals(q2, q3) + + q4 = MatchAllQuery() + q4 = FilteredQuery(q4, TermFilter('tag', 'foo')) + q4 = q4.search() + q4.facet.add_term_facet('bag') + self.assertNotEquals(q3, q4) + + def test_date_facet(self): + q = MatchAllQuery() + q = q.search() + q.facet.facets.append(DateHistogramFacet('date_facet', + field='date', + interval='month')) + resultset = self.conn.search(query=q, indices=self.index_name, doc_types=[self.document_type]) + 
self.assertEquals(resultset.total, 3) + self.assertEquals(resultset.facets.date_facet.entries, [{u'count': 2, u'time': 1301616000000}, + {u'count': 1, u'time': 1304208000000}]) + self.assertEquals(datetime.datetime.fromtimestamp(1301616000000 / 1000.).date(), + datetime.date(2011, 04, 01)) + self.assertEquals(datetime.datetime.fromtimestamp(1304208000000 / 1000.).date(), + datetime.date(2011, 05, 01)) + + def test_date_facet_filter(self): + q = MatchAllQuery() + q = FilteredQuery(q, RangeFilter(qrange=ESRange('date', + datetime.date(2011, 4, 1), + datetime.date(2011, 5, 1), + include_upper=False))) + q = q.search() + q.facet.facets.append(DateHistogramFacet('date_facet', + field='date', + interval='month')) + resultset = self.conn.search(query=q, indices=self.index_name, doc_types=[self.document_type]) + self.assertEquals(resultset.total, 2) + self.assertEquals(resultset.facets['date_facet']['entries'], [{u'count': 2, u'time': 1301616000000}]) + +if __name__ == "__main__": + unittest.main() diff --git a/src/archivematicaCommon/lib/externals/pyes/tests/test_geoloc.py b/src/archivematicaCommon/lib/externals/pyes/tests/test_geoloc.py new file mode 100644 index 0000000000..a881e2df02 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/tests/test_geoloc.py @@ -0,0 +1,90 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import +import unittest +from .estestcase import ESTestCase +from ..filters import GeoBoundingBoxFilter, GeoDistanceFilter, GeoPolygonFilter +from ..query import FilteredQuery, MatchAllQuery + +#--- Geo Queries Test case +class GeoQuerySearchTestCase(ESTestCase): + + def setUp(self): + super(GeoQuerySearchTestCase, self).setUp() + mapping = { + "pin" : { + "properties" : { + "location" : { + "type" : "geo_point" + } + } + } + } + self.conn.delete_index_if_exists("test-mindex") + self.conn.create_index("test-mindex") + self.conn.put_mapping(self.document_type, {'properties':mapping}, ["test-mindex"]) + self.conn.index({ + "pin" : { + 
"location" : { + "lat" : 40.12, + "lon" :-71.34 + } + } + }, "test-mindex", self.document_type, 1) + self.conn.index({ + "pin" : { + "location" : { + "lat" : 40.12, + "lon" : 71.34 + } + } + }, "test-mindex", self.document_type, 2) + + self.conn.refresh(["test-mindex"]) + + def tearDown(self): + self.conn.delete_index_if_exists("test-mindex") + + def test_GeoDistanceFilter(self): + gq = GeoDistanceFilter("pin.location", {"lat" : 40, "lon" :70}, "200km") + q = FilteredQuery(MatchAllQuery(), gq) + resultset = self.conn.search(query=q, indices=["test-mindex"]) + self.assertEquals(resultset.total, 1) + + gq = GeoDistanceFilter("pin.location", [70, 40], "200km") + q = FilteredQuery(MatchAllQuery(), gq) + resultset = self.conn.search(query=q, indices=["test-mindex"]) + self.assertEquals(resultset.total, 1) + + def test_GeoBoundingBoxFilter(self): + gq = GeoBoundingBoxFilter("pin.location", location_tl={"lat" : 40.717, "lon" : 70.99}, location_br={"lat" : 40.03, "lon" : 72.0}) + q = FilteredQuery(MatchAllQuery(), gq) + resultset = self.conn.search(query=q, indices=["test-mindex"]) + self.assertEquals(resultset.total, 1) + + gq = GeoBoundingBoxFilter("pin.location", [70.99, 40.717], [74.1, 40.03]) + q = FilteredQuery(MatchAllQuery(), gq) + result2 = self.conn.search(query=q, indices=["test-mindex"]) + self.assertEquals(result2.total, 1) +# del result['took'] +# del result2['took'] +# self.assertEquals(result, result2) + + def test_GeoPolygonFilter(self): + gq = GeoPolygonFilter("pin.location", [{"lat" : 50, "lon" :-30}, + {"lat" : 30, "lon" :-80}, + {"lat" : 80, "lon" :-90}] + ) + q = FilteredQuery(MatchAllQuery(), gq) + resultset = self.conn.search(query=q, indices=["test-mindex"]) + self.assertEquals(resultset.total, 1) + + gq = GeoPolygonFilter("pin.location", [[ -30, 50], + [ -80, 30], + [ -90, 80]] + ) + q = FilteredQuery(MatchAllQuery(), gq) + resultset = self.conn.search(query=q, indices=["test-mindex"]) + self.assertEquals(resultset.total, 1) + +if __name__ == 
"__main__": + unittest.main() diff --git a/src/archivematicaCommon/lib/externals/pyes/tests/test_highlight.py b/src/archivematicaCommon/lib/externals/pyes/tests/test_highlight.py new file mode 100644 index 0000000000..563130ee03 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/tests/test_highlight.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import +import unittest +from .estestcase import ESTestCase +from ..query import Search, StringQuery, HighLighter + +class QuerySearchTestCase(ESTestCase): + def setUp(self): + super(QuerySearchTestCase, self).setUp() + mapping = {u'parsedtext': {'boost': 1.0, + 'index': 'analyzed', + 'store': 'yes', + 'type': u'string', + "term_vector": "with_positions_offsets"}, + u'name': {'boost': 1.0, + 'index': 'analyzed', + 'store': 'yes', + 'type': u'string', + "term_vector": "with_positions_offsets"}, + u'title': {'boost': 1.0, + 'index': 'analyzed', + 'store': 'yes', + 'type': u'string', + "term_vector": "with_positions_offsets"}, + u'pos': {'store': 'yes', + 'type': u'integer'}, + u'uuid': {'boost': 1.0, + 'index': 'not_analyzed', + 'store': 'yes', + 'type': u'string'}} + self.conn.create_index(self.index_name) + self.conn.put_mapping(self.document_type, {'properties': mapping}, self.index_name) + self.conn.index({"name": "Joe Tester", "parsedtext": "Joe Testere nice guy", "uuid": "11111", "position": 1}, + self.index_name, self.document_type, 1) + self.conn.index({"name": "Bill Baloney", "parsedtext": "Joe Testere nice guy", "uuid": "22222", "position": 2}, + self.index_name, self.document_type, 2) + self.conn.index({"parsedtext": "Joe Testere nice guy", "uuid": "22222", "position": 2}, self.index_name, + self.document_type, 2) + self.conn.refresh(self.index_name) + + def test_QueryHighlight(self): + q = Search(StringQuery("joe")) + q.add_highlight("parsedtext") + q.add_highlight("name") + resultset = self.conn.search(q, indices=self.index_name) + self.assertEquals(resultset.total, 2) + 
self.assertNotEqual(resultset[0]._meta.highlight, None) + + self.assertEquals(resultset[0]._meta.highlight[u"parsedtext"][0].strip(), + u'Joe Testere nice guy') + + def test_QueryHighlightWithHighLighter(self): + h = HighLighter([''], ['']) + q = Search(StringQuery("joe"), highlight=h) + q.add_highlight("parsedtext") + q.add_highlight("name") + resultset = self.conn.search(q, indices=self.index_name) + self.assertEquals(resultset.total, 2) + self.assertNotEqual(resultset[0]._meta.highlight, None) + + self.assertEquals(resultset[0]._meta.highlight[u"parsedtext"][0].strip(), + u'Joe Testere nice guy') + +if __name__ == "__main__": + unittest.main() diff --git a/src/archivematicaCommon/lib/externals/pyes/tests/test_index_stats.py b/src/archivematicaCommon/lib/externals/pyes/tests/test_index_stats.py new file mode 100644 index 0000000000..e2d2c2e6f2 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/tests/test_index_stats.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import +import unittest +from .estestcase import ESTestCase + +class IndexStatsTestCase(ESTestCase): + def setUp(self): + super(IndexStatsTestCase, self).setUp() + mapping = {u'parsedtext': {'boost': 1.0, + 'index': 'analyzed', + 'store': 'yes', + 'type': u'string', + "term_vector": "with_positions_offsets"}, + u'name': {'boost': 1.0, + 'index': 'analyzed', + 'store': 'yes', + 'type': u'string', + "term_vector": "with_positions_offsets"}, + u'title': {'boost': 1.0, + 'index': 'analyzed', + 'store': 'yes', + 'type': u'string', + "term_vector": "with_positions_offsets"}, + u'pos': {'store': 'yes', + 'type': u'integer'}, + u'uuid': {'boost': 1.0, + 'index': 'not_analyzed', + 'store': 'yes', + 'type': u'string'}} + self.conn.create_index(self.index_name) + self.conn.put_mapping(self.document_type, {'properties': mapping}, self.index_name) + self.conn.put_mapping("test-type2", {"_parent": {"type": self.document_type}}, self.index_name) + self.conn.index({"name": "Joe 
Tester", "parsedtext": "Joe Testere nice guy", "uuid": "11111", "position": 1}, + self.index_name, self.document_type, 1) + self.conn.index({"name": "data1", "value": "value1"}, self.index_name, "test-type2", 1, parent=1) + self.conn.index({"name": "Bill Baloney", "parsedtext": "Bill Testere nice guy", "uuid": "22222", "position": 2}, + self.index_name, self.document_type, 2) + self.conn.index({"name": "data2", "value": "value2"}, self.index_name, "test-type2", 2, parent=2) + self.conn.index({"name": "Bill Clinton", "parsedtext": """Bill is not + nice guy""", "uuid": "33333", "position": 3}, self.index_name, self.document_type, 3) + + self.conn.default_indices = self.index_name + + self.conn.refresh() + + def test_all_indices(self): + result = self.conn.index_stats() + self.assertEquals(5, result._all.indices[self.index_name].total.docs.count) + + def test_select_indices(self): + result = self.conn.index_stats(self.index_name) + self.assertEquals(5, result._all.indices[self.index_name].total.docs.count) + +if __name__ == "__main__": + unittest.main() diff --git a/src/archivematicaCommon/lib/externals/pyes/tests/test_indexing.py b/src/archivematicaCommon/lib/externals/pyes/tests/test_indexing.py new file mode 100644 index 0000000000..871d479330 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/tests/test_indexing.py @@ -0,0 +1,240 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import +import unittest +from .estestcase import ESTestCase + +from ..query import TermQuery +from ..exceptions import (IndexAlreadyExistsException, + DocumentAlreadyExistsEngineException, + VersionConflictEngineException) +from time import sleep + +class IndexingTestCase(ESTestCase): + def setUp(self): + super(IndexingTestCase, self).setUp() + self.conn.delete_index_if_exists(self.index_name) + self.conn.delete_index_if_exists("test-index2") + self.conn.delete_index_if_exists("another-index") + self.conn.create_index(self.index_name) + 
self.conn.create_index("test-index2") + + def tearDown(self): + self.conn.delete_index_if_exists(self.index_name) + self.conn.delete_index_if_exists("test-index2") + self.conn.delete_index_if_exists("another-index") + + def testExists(self): + self.assertTrue(self.conn.exists_index(self.index_name)) + self.assertFalse(self.conn.exists_index("test-index5")) + + def testCollectInfo(self): + """ + Testing collecting server info + """ + self.conn.collect_info() + result = self.conn.info + self.assertTrue(result.has_key('server')) + self.assertTrue(result.has_key('aliases')) + self.assertTrue(result['server'].has_key('name')) + self.assertTrue(result['server'].has_key('version')) + + def testIndexingWithID(self): + """ + Testing an indexing given an ID + """ + result = self.conn.index({"name": "Joe Tester"}, self.index_name, self.document_type, 1) + self.assertResultContains(result, { + '_type': 'test-type', + '_id': '1', 'ok': True, + '_index': 'test-index'}) + + def testIndexingWithoutID(self): + """Testing an indexing given without ID""" + result = self.conn.index({"name": "Joe Tester"}, self.index_name, self.document_type) + self.assertResultContains(result, { + '_type': 'test-type', + 'ok': True, + '_index': 'test-index'}) + # should have an id of some value assigned. 
+ self.assertTrue(result.has_key('_id') and result['_id']) + + def testExplicitIndexCreate(self): + """Creazione indice""" + self.conn.delete_index("test-index2") + result = self.conn.create_index("test-index2") + self.assertResultContains(result, {'acknowledged': True, 'ok': True}) + + def testDeleteByID(self): + self.conn.index({"name": "Joe Tester"}, self.index_name, self.document_type, 1) + self.conn.refresh(self.index_name) + result = self.conn.delete(self.index_name, self.document_type, 1) + self.assertResultContains(result, { + '_type': 'test-type', + '_id': '1', 'ok': True, + '_index': 'test-index'}) + + def testDeleteByIDWithEncoding(self): + self.conn.index({"name": "Joe Tester"}, self.index_name, self.document_type, "http://hello/?#'there") + self.conn.refresh(self.index_name) + result = self.conn.delete(self.index_name, self.document_type, "http://hello/?#'there") + self.assertResultContains(result, { + '_type': 'test-type', + '_id': 'http://hello/?#\'there', + 'ok': True, + '_index': 'test-index'}) + + def testDeleteIndex(self): + self.conn.create_index("another-index") + result = self.conn.delete_index("another-index") + self.assertResultContains(result, {'acknowledged': True, 'ok': True}) + + def testCannotCreateExistingIndex(self): + self.conn.create_index("another-index") + self.assertRaises(IndexAlreadyExistsException, self.conn.create_index, "another-index") + self.conn.delete_index("another-index") + + def testPutMapping(self): + result = self.conn.put_mapping(self.document_type, + {self.document_type: {"properties": {"name": {"type": "string", "store": "yes"}}}}, + indices=self.index_name) + self.assertResultContains(result, {'acknowledged': True, 'ok': True}) + + def testIndexStatus(self): + self.conn.create_index("another-index") + result = self.conn.status(["another-index"]) + self.conn.delete_index("another-index") + self.assertTrue(result.has_key('indices')) + self.assertResultContains(result, {'ok': True}) + + def testIndexFlush(self): + 
self.conn.create_index("another-index") + result = self.conn.flush(["another-index"]) + self.conn.delete_index("another-index") + self.assertResultContains(result, {'ok': True}) + + def testIndexRefresh(self): + self.conn.create_index("another-index") + result = self.conn.refresh(["another-index"]) + self.conn.delete_index("another-index") + self.assertResultContains(result, {'ok': True}) + + def testIndexOptimize(self): + self.conn.create_index("another-index") + result = self.conn.optimize(["another-index"]) + self.conn.delete_index("another-index") + self.assertResultContains(result, {'ok': True}) + + def testUpdate(self): + self.conn.index({"name": "Joe Tester", "sex": "male"}, + self.index_name, self.document_type, 1) + self.conn.refresh(self.index_name) + self.conn.update({"name": "Joe The Tester", "age": 23}, + self.index_name, self.document_type, 1) + self.conn.refresh(self.index_name) + result = self.conn.get(self.index_name, self.document_type, 1) + self.assertResultContains(result, {"name": "Joe The Tester", "sex": "male", "age": 23}) + self.assertResultContains(result._meta, + {"index": "test-index", "type": "test-type", "id": "1"}) + + def testUpdateUsingFunc(self): + def update_list_values(current, extra): + for k, v in extra.iteritems(): + if isinstance(current.get(k), list): + current[k].extend(v) + else: + current[k] = v + + self.conn.index({"name": "Joe Tester", "age": 23, "skills": ["QA"]}, + self.index_name, self.document_type, 1) + self.conn.refresh(self.index_name) + self.conn.update({"age": 24, "skills": ["cooking"]}, self.index_name, + self.document_type, 1, update_func=update_list_values) + self.conn.refresh(self.index_name) + result = self.conn.get(self.index_name, self.document_type, 1) + self.assertResultContains(result, {"name": "Joe Tester", "age": 24, + "skills": ["QA", "cooking"]}) + self.assertResultContains(result._meta, + {"index": "test-index", "type": "test-type", "id": "1"}) + + def testGetByID(self): + self.conn.index({"name": 
"Joe Tester"}, self.index_name, self.document_type, 1) + self.conn.index({"name": "Bill Baloney"}, self.index_name, self.document_type, 2) + self.conn.refresh(self.index_name) + result = self.conn.get(self.index_name, self.document_type, 1) + self.assertResultContains(result, {"name": "Joe Tester"}) + self.assertResultContains(result._meta, {"index": "test-index", + "type": "test-type", "id": "1"}) + + def testMultiGet(self): + self.conn.index({"name": "Joe Tester"}, self.index_name, self.document_type, 1) + self.conn.index({"name": "Bill Baloney"}, self.index_name, self.document_type, 2) + self.conn.refresh(self.index_name) + results = self.conn.mget(["1", "2"], self.index_name, self.document_type) + self.assertEqual(len(results), 2) + + def testGetCountBySearch(self): + self.conn.index({"name": "Joe Tester"}, self.index_name, self.document_type, 1) + self.conn.index({"name": "Bill Baloney"}, self.index_name, self.document_type, 2) + self.conn.refresh(self.index_name) + q = TermQuery("name", "joe") + result = self.conn.count(q, indices=self.index_name) + self.assertResultContains(result, {'count': 1}) + + + # def testSearchByField(self): + # resultset = self.conn.search("name:joe") + # self.assertResultContains(result, {'hits': {'hits': [{'_type': 'test-type', '_id': '1', '_source': {'name': 'Joe Tester'}, '_index': 'test-index'}], 'total': 1}}) + + # def testTermsByField(self): + # result = self.conn.terms(['name']) + # self.assertResultContains(result, {'docs': {'max_doc': 2, 'num_docs': 2, 'deleted_docs': 0}, 'fields': {'name': {'terms': [{'term': 'baloney', 'doc_freq': 1}, {'term': 'bill', 'doc_freq': 1}, {'term': 'joe', 'doc_freq': 1}, {'term': 'tester', 'doc_freq': 1}]}}}) + # + # def testTermsByIndex(self): + # result = self.conn.terms(['name'], indices=['test-index']) + # self.assertResultContains(result, {'docs': {'max_doc': 2, 'num_docs': 2, 'deleted_docs': 0}, 'fields': {'name': {'terms': [{'term': 'baloney', 'doc_freq': 1}, {'term': 'bill', 'doc_freq': 
1}, {'term': 'joe', 'doc_freq': 1}, {'term': 'tester', 'doc_freq': 1}]}}}) + # + # def testTermsMinFreq(self): + # result = self.conn.terms(['name'], min_freq=2) + # self.assertResultContains(result, {'docs': {'max_doc': 2, 'num_docs': 2, 'deleted_docs': 0}, 'fields': {'name': {'terms': []}}}) + + def testMLT(self): + self.conn.index({"name": "Joe Test"}, self.index_name, self.document_type, 1) + self.conn.index({"name": "Joe Tester"}, self.index_name, self.document_type, 2) + self.conn.index({"name": "Joe did the test"}, self.index_name, self.document_type, 3) + self.conn.refresh(self.index_name) + sleep(0.5) + result = self.conn.morelikethis(self.index_name, self.document_type, 1, ['name'], min_term_freq=1, + min_doc_freq=1) + del result[u'took'] + self.assertResultContains(result, {u'_shards': {u'successful': 5, u'failed': 0, u'total': 5}}) + self.assertTrue(u'hits' in result) + self.assertResultContains(result["hits"], {"hits": [ + {"_score": 0.2169777, "_type": "test-type", "_id": "3", "_source": {"name": "Joe did the test"}, + "_index": "test-index"}, + {"_score": 0.19178301, "_type": "test-type", "_id": "2", "_source": {"name": "Joe Tester"}, + "_index": "test-index"}, + ], "total": 2, "max_score": 0.2169777}) + + # fails because arrays don't work. annoying!!! 
+ ''' + self.assertEqual(2, result['hits']['total']) + self.assertEqual(0.19178301, result['hits']['max_score']) + self.assertResultContains({'wtf':result['hits']['hits']}, {'wtf':[ + {u'_score': 0.19178301, u'_type': u'test-type', u'_id': u'3', u'_source': {u'name': u'Joe Tested'}, u'_index': u'test-index'}, + {u'_score': 0.19178301, u'_type': u'test-type', u'_id': u'2', u'_source': {u'name': u'Joe Tester'}, u'_index': u'test-index'}, + ]}) + ''' + + def testVersion(self): + self.conn.index({"name": "Joe Test"}, self.index_name, self.document_type, 1, force_insert=True) + self.assertRaises(DocumentAlreadyExistsEngineException, self.conn.index, + {"name": "Joe Test2"}, self.index_name, self.document_type, 1, force_insert=True) + self.conn.index({"name": "Joe Test"}, self.index_name, self.document_type, 1, version=1) + self.conn.index({"name": "Joe Test"}, self.index_name, self.document_type, 1, version=2) + self.assertRaises(VersionConflictEngineException, self.conn.index, + {"name": "Joe Test2"}, self.index_name, self.document_type, 1, version=2) + item = self.conn.get(self.index_name, self.document_type, 1) + self.assertEqual(item._meta.version, 3) + +if __name__ == "__main__": + unittest.main() diff --git a/src/archivematicaCommon/lib/externals/pyes/tests/test_mapping_parser.py b/src/archivematicaCommon/lib/externals/pyes/tests/test_mapping_parser.py new file mode 100644 index 0000000000..d11db15429 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/tests/test_mapping_parser.py @@ -0,0 +1,13 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import +from .estestcase import ESTestCase +from .. 
import decode_json +from ..mappings import Mapper + +class MapperTestCase(ESTestCase): + def test_parser(self): + self.datamap = decode_json(self.get_datafile("map.json")) + _ = Mapper(self.datamap) + + #mapping = self.conn.get_mapping() + #self.dump(mapping) diff --git a/src/archivematicaCommon/lib/externals/pyes/tests/test_multifield.py b/src/archivematicaCommon/lib/externals/pyes/tests/test_multifield.py new file mode 100644 index 0000000000..6ce15ebd9d --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/tests/test_multifield.py @@ -0,0 +1,80 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import +import unittest +from .estestcase import ESTestCase +from ..query import TermQuery + +class MultifieldTestCase(ESTestCase): + def setUp(self): + super(MultifieldTestCase, self).setUp() + mapping = {u'parsedtext': {'boost': 1.0, + 'index': 'analyzed', + 'store': 'yes', + 'type': u'string', + "term_vector": "with_positions_offsets"}, + u'title': {'boost': 1.0, + 'index': 'analyzed', + 'store': 'yes', + 'type': u'string', + "term_vector": "with_positions_offsets"}, + u'name': {"type": "multi_field", + "fields": { + u'name': { + u'boost': 1.0, + u'index': u'analyzed', + u'omit_norms': False, + u'omit_term_freq_and_positions': False, + u'store': u'yes', + "term_vector": "with_positions_offsets", + u'type': u'string'}, + u'untouched': {u'boost': 1.0, + u'index': u'not_analyzed', + u'omit_norms': False, + u'omit_term_freq_and_positions': False, + u'store': u'yes', + "term_vector": "no", + u'type': u'string'} + + } + + }, + + u'pos': {'store': 'yes', + 'type': u'integer'}, + u'uuid': {'boost': 1.0, + 'index': 'not_analyzed', + 'store': 'yes', + 'type': u'string'}} + self.conn.create_index(self.index_name) + self.conn.put_mapping(self.document_type, {'properties': mapping}, self.index_name) + self.conn.index({"name": "Joe Tester", "parsedtext": "Joe Testere nice guy", "uuid": "11111", "position": 1}, + self.index_name, self.document_type, 1) + 
self.conn.index({"name": "Bill Baloney", "parsedtext": "Joe Testere nice guy", "uuid": "22222", "position": 2}, + self.index_name, self.document_type, 2) + self.conn.index({"value": "Joe Tester"}, self.index_name, self.document_type) + self.conn.index({"value": 123343543536}, self.index_name, self.document_type) + self.conn.index({"value": True}, self.index_name, self.document_type) + self.conn.index({"value": 43.32}, self.index_name, self.document_type) + #self.conn.index({"value": datetime.now()}, self.index_name, self.document_type) + self.conn.refresh(self.index_name) + + def test_TermQuery(self): + q = TermQuery("name", "joe") + resultset = self.conn.search(query=q, indices=self.index_name) + self.assertEquals(resultset.total, 1) + + q = TermQuery("name", "joe", 3) + resultset = self.conn.search(query=q, indices=self.index_name) + self.assertEquals(resultset.total, 1) + + q = TermQuery("name", "joe", "3") + resultset = self.conn.search(query=q, indices=self.index_name) + self.assertEquals(resultset.total, 1) + + q = TermQuery("value", 43.32) + resultset = self.conn.search(query=q, indices=self.index_name) + self.assertEquals(resultset.total, 1) + + +if __name__ == "__main__": + unittest.main() diff --git a/src/archivematicaCommon/lib/externals/pyes/tests/test_nested.py b/src/archivematicaCommon/lib/externals/pyes/tests/test_nested.py new file mode 100644 index 0000000000..a74a301898 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/tests/test_nested.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import +import unittest +from .estestcase import ESTestCase +from ..filters import TermFilter, NestedFilter +from ..query import FilteredQuery, MatchAllQuery, BoolQuery, TermQuery + +class NestedSearchTestCase(ESTestCase): + def setUp(self): + super(NestedSearchTestCase, self).setUp() + + mapping = { + 'nested1': { + 'type': 'nested' + } + } + self.conn.create_index(self.index_name) + self.conn.put_mapping(self.document_type, 
{'properties': mapping}, self.index_name) + self.conn.index({"field1": "value1", + "nested1": [{"n_field1": "n_value1_1", + "n_field2": "n_value2_1"}, + {"n_field1": "n_value1_2", + "n_field2": "n_value2_2"}]}, + self.index_name, self.document_type, 1) + self.conn.index({"field1": "value1", + "nested1": [{"n_field1": "n_value1_1", + "n_field2": "n_value2_2"}, + {"n_field1": "n_value1_2", + "n_field2": "n_value2_1"}]}, + self.index_name, self.document_type, 2) + self.conn.refresh(self.index_name) + + def test_nested_filter(self): + q = FilteredQuery(MatchAllQuery(), + TermFilter('_all', 'n_value1_1')) + resultset = self.conn.search(query=q, indices=self.index_name, doc_types=[self.document_type]) + self.assertEquals(resultset.total, 2) + + q = FilteredQuery(MatchAllQuery(), + TermFilter('nested1.n_field1', 'n_value1_1')) + resultset = self.conn.search(query=q, indices=self.index_name, doc_types=[self.document_type]) + self.assertEquals(resultset.total, 0) + + q = FilteredQuery(MatchAllQuery(), + TermFilter('nested1.n_field1', 'n_value1_1')) + resultset = self.conn.search(query=q, indices=self.index_name, doc_types=[self.document_type]) + self.assertEquals(resultset.total, 0) + + q = FilteredQuery(MatchAllQuery(), + NestedFilter('nested1', + BoolQuery(must=[TermQuery('nested1.n_field1', 'n_value1_1')]))) + resultset = self.conn.search(query=q, indices=self.index_name, doc_types=[self.document_type]) + self.assertEquals(resultset.total, 2) + + q = FilteredQuery(MatchAllQuery(), + NestedFilter('nested1', + BoolQuery(must=[TermQuery('nested1.n_field1', 'n_value1_1'), + TermQuery('nested1.n_field2', 'n_value2_1')]))) + resultset = self.conn.search(query=q, indices=self.index_name, doc_types=[self.document_type]) + self.assertEquals(resultset.total, 1) + + +if __name__ == "__main__": + unittest.main() + diff --git a/src/archivematicaCommon/lib/externals/pyes/tests/test_percolator.py b/src/archivematicaCommon/lib/externals/pyes/tests/test_percolator.py new file mode 100644 
index 0000000000..1e6cac3114 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/tests/test_percolator.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import +import unittest +from .estestcase import ESTestCase +from ..query import * +import unittest + +class PercolatorTestCase(ESTestCase): + def setUp(self): + super(PercolatorTestCase, self).setUp() + mapping = { u'parsedtext': {'boost': 1.0, + 'index': 'analyzed', + 'store': 'yes', + 'type': u'string', + "term_vector" : "with_positions_offsets"}, + u'name': {'boost': 1.0, + 'index': 'analyzed', + 'store': 'yes', + 'type': u'string', + "term_vector" : "with_positions_offsets"}, + u'title': {'boost': 1.0, + 'index': 'analyzed', + 'store': 'yes', + 'type': u'string', + "term_vector" : "with_positions_offsets"}, + u'pos': {'store': 'yes', + 'type': u'integer'}, + u'uuid': {'boost': 1.0, + 'index': 'not_analyzed', + 'store': 'yes', + 'type': u'string'}} + self.conn.create_index(self.index_name) + self.conn.put_mapping(self.document_type, {'properties':mapping}, self.index_name) + self.conn.create_percolator( + 'test-index', + 'test-perc1', + StringQuery(query='apple', search_fields='_all') + ) + self.conn.create_percolator( + 'test-index', + 'test-perc2', + StringQuery(query='apple OR iphone', search_fields='_all') + ) + self.conn.create_percolator( + 'test-index', + 'test-perc3', + StringQuery(query='apple AND iphone', search_fields='_all') + ) + self.conn.refresh(self.index_name) + + def test_percolator(self): + results = self.conn.percolate('test-index', 'test-type', PercolatorQuery({'name': 'iphone'})) + self.assertTrue('test-perc1' not in results['matches']) + self.assertTrue('test-perc2' in results['matches']) + self.assertTrue('test-perc3' not in results['matches']) + + def test_or(self): + results = self.conn.percolate('test-index', 'test-type', PercolatorQuery({'name': 'apple'})) + self.assertTrue('test-perc1' in results['matches']) + self.assertTrue('test-perc2' in 
results['matches']) + self.assertTrue('test-perc3' not in results['matches']) + + def test_and(self): + results = self.conn.percolate('test-index', 'test-type', PercolatorQuery({'name': 'apple iphone'})) + self.assertTrue('test-perc1' in results['matches']) + self.assertTrue('test-perc2' in results['matches']) + self.assertTrue('test-perc3' in results['matches']) + + def tearDown(self): + self.conn.delete_percolator('test-index', 'test-perc1') + self.conn.delete_percolator('test-index', 'test-perc2') + self.conn.delete_percolator('test-index', 'test-perc3') + super(PercolatorTestCase, self).tearDown() + + +if __name__ == "__main__": + unittest.main() diff --git a/src/archivematicaCommon/lib/externals/pyes/tests/test_queries.py b/src/archivematicaCommon/lib/externals/pyes/tests/test_queries.py new file mode 100644 index 0000000000..80d6219316 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/tests/test_queries.py @@ -0,0 +1,422 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import +import unittest +from .estestcase import ESTestCase +from ..query import * +from ..filters import TermFilter, ANDFilter, ORFilter, RangeFilter, RawFilter, IdsFilter, MatchAllFilter, NotFilter +from ..utils import ESRangeOp + +class QuerySearchTestCase(ESTestCase): + def setUp(self): + super(QuerySearchTestCase, self).setUp() + mapping = {u'parsedtext': {'boost': 1.0, + 'index': 'analyzed', + 'store': 'yes', + 'type': u'string', + "term_vector": "with_positions_offsets"}, + u'name': {'boost': 1.0, + 'index': 'analyzed', + 'store': 'yes', + 'type': u'string', + "term_vector": "with_positions_offsets"}, + u'title': {'boost': 1.0, + 'index': 'analyzed', + 'store': 'yes', + 'type': u'string', + "term_vector": "with_positions_offsets"}, + u'pos': {'store': 'yes', + 'type': u'integer'}, + u'uuid': {'boost': 1.0, + 'index': 'not_analyzed', + 'store': 'yes', + 'type': u'string'}} + self.conn.create_index(self.index_name) + self.conn.put_mapping(self.document_type, 
{'properties': mapping}, self.index_name) + self.conn.put_mapping("test-type2", {"_parent": {"type": self.document_type}}, self.index_name) + self.conn.index({"name": "Joe Tester", "parsedtext": "Joe Testere nice guy", "uuid": "11111", "position": 1}, + self.index_name, self.document_type, 1) + self.conn.index({"name": "data1", "value": "value1"}, self.index_name, "test-type2", 1, parent=1) + self.conn.index({"name": "Bill Baloney", "parsedtext": "Bill Testere nice guy", "uuid": "22222", "position": 2}, + self.index_name, self.document_type, 2) + self.conn.index({"name": "data2", "value": "value2"}, self.index_name, "test-type2", 2, parent=2) + self.conn.index({"name": "Bill Clinton", "parsedtext": """Bill is not + nice guy""", "uuid": "33333", "position": 3}, self.index_name, self.document_type, 3) + + self.conn.default_indices = self.index_name + + self.conn.refresh() + + def test_TermQuery(self): + q = TermQuery("name", "joe") + resultset = self.conn.search(query=q, indices=self.index_name) + self.assertEquals(resultset.total, 1) + self.assertEquals(q, TermQuery("name", "joe")) + self.assertNotEquals(q, TermQuery("name", "job")) + + q = TermQuery("name", "joe", 3) + resultset = self.conn.search(query=q, indices=self.index_name) + self.assertEquals(resultset.total, 1) + self.assertEquals(q, TermQuery("name", "joe", 3)) + self.assertNotEquals(q, TermQuery("name", "joe", 4)) + + q = TermQuery("name", "joe", "3") + resultset = self.conn.search(query=q, indices=self.index_name) + self.assertEquals(resultset.total, 1) + self.assertEquals(q, TermQuery("name", "joe", "3")) + self.assertNotEquals(q, TermQuery("name", "joe", "4")) + + def test_WildcardQuery(self): + q = WildcardQuery("name", "jo*") + resultset = self.conn.search(query=q, indices=self.index_name) + self.assertEquals(resultset.total, 1) + self.assertEquals(q, WildcardQuery("name", "jo*")) + self.assertNotEquals(q, WildcardQuery("name", "bo*")) + + q = WildcardQuery("name", "jo*", 3) + resultset = 
self.conn.search(query=q, indices=self.index_name) + self.assertEquals(resultset.total, 1) + self.assertEquals(q, WildcardQuery("name", "jo*", 3)) + self.assertNotEquals(q, WildcardQuery("name", "jo*", 4)) + + q = WildcardQuery("name", "jo*", "3") + resultset = self.conn.search(query=q, indices=self.index_name) + self.assertEquals(resultset.total, 1) + self.assertEquals(q, WildcardQuery("name", "jo*", "3")) + self.assertNotEquals(q, WildcardQuery("name", "jo*", "4")) + + def test_PrefixQuery(self): + q = PrefixQuery("name", "jo") + resultset = self.conn.search(query=q, indices=self.index_name) + self.assertEquals(resultset.total, 1) + self.assertEquals(q, PrefixQuery("name", "jo")) + self.assertNotEquals(q, PrefixQuery("name", "bo")) + + q = PrefixQuery("name", "jo", 3) + resultset = self.conn.search(query=q, indices=self.index_name) + self.assertEquals(resultset.total, 1) + self.assertEquals(q, PrefixQuery("name", "jo", 3)) + self.assertNotEquals(q, PrefixQuery("name", "jo", 4)) + + q = PrefixQuery("name", "jo", "3") + resultset = self.conn.search(query=q, indices=self.index_name) + self.assertEquals(resultset.total, 1) + self.assertEquals(q, PrefixQuery("name", "jo", "3")) + self.assertNotEquals(q, PrefixQuery("name", "jo", "4")) + + def test_MatchAllQuery(self): + q = MatchAllQuery() + resultset = self.conn.search(query=q, indices=self.index_name, doc_types=[self.document_type]) + self.assertEquals(resultset.total, 3) + self.assertEquals(q, MatchAllQuery()) + + def test_StringQuery(self): + q = StringQuery("joe AND test") + resultset = self.conn.search(query=q, indices=self.index_name) + self.assertEquals(resultset.total, 0) + self.assertEquals(q, StringQuery("joe AND test")) + self.assertNotEquals(q, StringQuery("moe AND test")) + + q = StringQuery("joe OR test") + resultset = self.conn.search(query=q, indices=self.index_name) + self.assertEquals(resultset.total, 1) + self.assertEquals(q, StringQuery("joe OR test")) + self.assertNotEquals(q, StringQuery("moe OR 
test")) + + q1 = StringQuery("joe") + q2 = StringQuery("test") + q = BoolQuery(must=[q1, q2]) + resultset = self.conn.search(query=q, indices=self.index_name) + self.assertEquals(resultset.total, 0) + self.assertEquals(q, BoolQuery(must=[StringQuery("joe"), StringQuery("test")])) + self.assertNotEquals(q, BoolQuery(must=[StringQuery("moe"), StringQuery("test")])) + + q = BoolQuery(should=[q1, q2]) + resultset = self.conn.search(query=q, indices=self.index_name) + self.assertEquals(resultset.total, 1) + self.assertEquals(q, BoolQuery(should=[StringQuery("joe"), StringQuery("test")])) + self.assertNotEquals(q, BoolQuery(should=[StringQuery("moe"), StringQuery("test")])) + + def test_OR_AND_Filters(self): + q1 = TermFilter("position", 1) + q2 = TermFilter("position", 2) + andq = ANDFilter([q1, q2]) + + q = FilteredQuery(MatchAllQuery(), andq) + resultset = self.conn.search(query=q, indices=self.index_name) + self.assertEquals(resultset.total, 0) + self.assertEquals(q, FilteredQuery(MatchAllQuery(), + ANDFilter([TermFilter("position", 1), TermFilter("position", 2)]))) + self.assertNotEquals(q, FilteredQuery(MatchAllQuery(), + ANDFilter([TermFilter("position", 1), TermFilter("position", 3)]))) + + orq = ORFilter([q1, q2]) + q = FilteredQuery(MatchAllQuery(), orq) + resultset = self.conn.search(query=q, indices=self.index_name) + self.assertEquals(resultset.total, 2) + self.assertEquals(q, FilteredQuery(MatchAllQuery(), + ORFilter([TermFilter("position", 1), TermFilter("position", 2)]))) + self.assertNotEquals(q, FilteredQuery(MatchAllQuery(), + ORFilter([TermFilter("position", 1), TermFilter("position", 3)]))) + + def test_FieldQuery(self): + q = FieldQuery(FieldParameter("name", "+joe")) + resultset = self.conn.search(query=q, indices=self.index_name) + self.assertEquals(resultset.total, 1) + self.assertEquals(q, FieldQuery(FieldParameter("name", "+joe"))) + self.assertNotEquals(q, FieldQuery(FieldParameter("name", "+job"))) + + def test_DisMaxQuery(self): + q = 
DisMaxQuery(FieldQuery(FieldParameter("name", "+joe"))) + resultset = self.conn.search(query=q, indices=self.index_name) + self.assertEquals(resultset.total, 1) + self.assertEquals(q, DisMaxQuery(FieldQuery(FieldParameter("name", "+joe")))) + self.assertNotEquals(q, DisMaxQuery(FieldQuery(FieldParameter("name", "+job")))) + + def test_FuzzyQuery(self): + q = FuzzyQuery('name', 'data') + resultset = self.conn.search(query=q, indices=self.index_name) + + self.assertEquals(resultset.total, 2) + self.assertEquals(q, FuzzyQuery('name', 'data')) + self.assertNotEquals(q, FuzzyQuery('name', 'data2')) + + def test_HasChildQuery(self): + q = HasChildQuery(type="test-type2", query=TermQuery("name", "data1")) + resultset = self.conn.search(query=q, indices=self.index_name) + self.assertEquals(resultset.total, 1) + self.assertEquals(q, HasChildQuery(type="test-type2", query=TermQuery("name", "data1"))) + self.assertNotEquals(q, HasChildQuery(type="test-type2", query=TermQuery("name", "data2"))) + + def test_RegexTermQuery(self): + # Don't run this test, because it depends on the RegexTermQuery + # feature which is not currently in elasticsearch trunk. 
+ return + + # q = RegexTermQuery("name", "jo.") + # resultset = self.conn.search(query=q, indices=self.index_name) + # self.assertEquals(resultset.total, 1) + # # When this test is re-enabled, be sure to add equality and inequality tests (issue 128) + + def test_CustomScoreQueryMvel(self): + q = CustomScoreQuery(query=MatchAllQuery(), + lang="mvel", + script="_score*(5+doc.position.value)" + ) + self.assertEquals(q, + CustomScoreQuery(query=MatchAllQuery(), + lang="mvel", + script="_score*(5+doc.position.value)" + )) + self.assertNotEquals(q, + CustomScoreQuery(query=MatchAllQuery(), + lang="mvel", + script="_score*(6+doc.position.value)" + )) + resultset = self.conn.search(query=q, indices=self.index_name, doc_types=[self.document_type]) + self.assertEquals(resultset.total, 3) + self.assertEquals(resultset[0]._meta.score, 8.0) + self.assertEquals(resultset[1]._meta.score, 7.0) + self.assertEquals(resultset.max_score, 8.0) + + def test_CustomScoreQueryJS(self): + q = CustomScoreQuery(query=MatchAllQuery(), + lang="js", + script="parseFloat(_score*(5+doc.position.value))" + ) + resultset = self.conn.search(query=q, indices=self.index_name, doc_types=[self.document_type]) + self.assertEquals(resultset.total, 3) + self.assertEquals(resultset[0]._meta.score, 8.0) + self.assertEquals(resultset[1]._meta.score, 7.0) + self.assertEquals(resultset.max_score, 8.0) + + def test_CustomScoreQueryPython(self): + q = CustomScoreQuery(query=MatchAllQuery(), + lang="python", + script="_score*(5+doc['position'].value)" + ) + resultset = self.conn.search(query=q, indices=self.index_name, doc_types=[self.document_type]) + self.assertEquals(resultset.total, 3) + self.assertEquals(resultset[0]._meta.score, 8.0) + self.assertEquals(resultset[1]._meta.score, 7.0) + self.assertEquals(resultset.max_score, 8.0) + + def test_Search_stats(self): + no_stats_group = Search(TermQuery("foo", "bar")) + one_stats_group = Search(TermQuery("foo", "bar"), stats="hello") + many_stats_groups = 
Search(TermQuery("foo", "bar"), stats=["hello", "there", "test"]) + + self.assertEquals(no_stats_group.stats, None) + self.assertEquals(one_stats_group.stats, "hello") + self.assertEquals(many_stats_groups.stats, ["hello", "there", "test"]) + + self.assertEquals(no_stats_group.serialize(), + {"query": {"term": {"foo": "bar"}}}) + self.assertEquals(one_stats_group.serialize(), + {"query": {"term": {"foo": "bar"}}, "stats": "hello"}) + self.assertEquals(many_stats_groups.serialize(), + {"query": {"term": {"foo": "bar"}}, "stats": ["hello", "there", "test"]}) + + def test_Search_equality(self): + self.assertEquals(Search(), + Search()) + self.assertNotEquals(Search(), + Search(query=TermQuery("h", "ello"))) + self.assertEquals(Search(query=TermQuery("h", "ello")), + Search(query=TermQuery("h", "ello"))) + self.assertNotEquals(Search(query=TermQuery("h", "ello")), + Search(query=TermQuery("j", "ello"))) + self.assertEquals(Search(filter=TermFilter("h", "ello")), + Search(filter=TermFilter("h", "ello"))) + self.assertNotEquals(Search(filter=TermFilter("h", "ello")), + Search(filter=TermFilter("j", "ello"))) + self.assertEquals(Search(query=TermQuery("h", "ello"), filter=TermFilter("h", "ello")), + Search(query=TermQuery("h", "ello"), filter=TermFilter("h", "ello"))) + self.assertNotEquals(Search(query=TermQuery("h", "ello"), filter=TermFilter("h", "ello")), + Search(query=TermQuery("j", "ello"), filter=TermFilter("j", "ello"))) + + def test_ESRange_equality(self): + self.assertEquals(RangeQuery(), + RangeQuery()) + self.assertEquals(RangeQuery(ESRange("foo", 1, 2)), + RangeQuery(ESRange("foo", 1, 2))) + self.assertNotEquals(RangeQuery(ESRange("foo", 1, 2)), + RangeQuery(ESRange("bar", 1, 2))) + self.assertEquals(RangeFilter(), + RangeFilter()) + self.assertEquals(RangeFilter(ESRange("foo", 1, 2)), + RangeFilter(ESRange("foo", 1, 2))) + self.assertNotEquals(RangeFilter(ESRange("foo", 1, 2)), + RangeFilter(ESRange("bar", 1, 2))) + self.assertEquals(ESRange("foo"), + 
ESRange("foo")) + self.assertNotEquals(ESRange("foo"), + ESRange("bar")) + self.assertEquals(ESRange("foo", 1), + ESRange("foo", 1)) + self.assertNotEquals(ESRange("foo", 1), + ESRange("foo", 2)) + self.assertEquals(ESRange("foo", 1, 2), + ESRange("foo", 1, 2)) + self.assertNotEquals(ESRange("foo", 1, 2), + ESRange("foo", 1, 3)) + self.assertEquals(ESRange("foo", 1, 2, True, False), + ESRange("foo", 1, 2, True, False)) + self.assertNotEquals(ESRange("foo", 1, 2, True, False), + ESRange("foo", 1, 2, False, True)) + self.assertEquals(ESRangeOp("foo", "gt", 5), + ESRangeOp("foo", "gt", 5)) + self.assertEquals(ESRangeOp("bar", "lt", 6), + ESRangeOp("bar", "lt", 6)) + + def test_RawFilter_dict(self): + filter_ = dict(ids=dict(type="my_type", values=["1", "4", "100"])) + self.assertEqual(RawFilter(filter_), RawFilter(filter_)) + self.assertEqual(RawFilter(filter_).serialize(), filter_) + self.assertEqual(RawFilter(filter_).serialize(), + IdsFilter(type="my_type", values=["1", "4", "100"]).serialize()) + + def test_RawFilter_string(self): + filter_ = dict(ids=dict(type="my_type", values=["1", "4", "100"])) + filter_string = json.dumps(filter_) + self.assertEqual(RawFilter(filter_string), RawFilter(filter_string)) + self.assertEqual(RawFilter(filter_string), RawFilter(filter_)) + self.assertEqual(RawFilter(filter_string).serialize(), filter_) + self.assertEqual(RawFilter(filter_string).serialize(), + IdsFilter(type="my_type", values=["1", "4", "100"]).serialize()) + + def test_RawFilter_search(self): + filter_ = dict(ids=dict(type="my_type", values=["1", "4", "100"])) + filter_string = json.dumps(filter_) + + self.assertEqual(Search(filter=RawFilter(filter_)).serialize(), + dict(filter=filter_)) + self.assertEqual(Search(filter=RawFilter(filter_string)).serialize(), + dict(filter=filter_)) + + def test_CustomFiltersScoreQuery_ScoreMode(self): + self.assertEquals(CustomFiltersScoreQuery.ScoreMode.FIRST, "first") + self.assertEquals(CustomFiltersScoreQuery.ScoreMode.MIN, 
"min") + self.assertEquals(CustomFiltersScoreQuery.ScoreMode.MAX, "max") + self.assertEquals(CustomFiltersScoreQuery.ScoreMode.TOTAL, "total") + self.assertEquals(CustomFiltersScoreQuery.ScoreMode.AVG, "avg") + self.assertEquals(CustomFiltersScoreQuery.ScoreMode.MULTIPLY, "multiply") + + def test_CustomFiltersScoreQuery_Filter(self): + with self.assertRaises(ValueError) as cm: + CustomFiltersScoreQuery.Filter(MatchAllFilter()) + self.assertEquals(cm.exception.message, "Exactly one of boost and script must be specified") + + with self.assertRaises(ValueError) as cm: + CustomFiltersScoreQuery.Filter(MatchAllFilter(), 5.0, "someScript") + self.assertEquals(cm.exception.message, "Exactly one of boost and script must be specified") + + filter1 = CustomFiltersScoreQuery.Filter(MatchAllFilter(), 5.0) + self.assertEquals(filter1, CustomFiltersScoreQuery.Filter(MatchAllFilter(), 5.0)) + self.assertEquals(filter1.filter_, MatchAllFilter()) + self.assertEquals(filter1.boost, 5.0) + self.assertIsNone(filter1.script) + self.assertEquals(filter1.serialize(), {'filter': {'match_all': {}}, 'boost': 5.0}) + + filter2 = CustomFiltersScoreQuery.Filter(NotFilter(MatchAllFilter()), script="hello") + self.assertEquals(filter2, CustomFiltersScoreQuery.Filter(NotFilter(MatchAllFilter()), script="hello")) + self.assertEquals(filter2.filter_, NotFilter(MatchAllFilter())) + self.assertEquals(filter2.script, "hello") + self.assertIsNone(filter2.boost) + self.assertEquals(filter2.serialize(), {'filter': {'not': {'filter': {'match_all': {}}}}, 'script': 'hello'}) + + def test_CustomFiltersScoreQuery(self): + script1 = "max(1,2)" + script2 = "min(1,2)" + + filter1 = CustomFiltersScoreQuery.Filter(MatchAllFilter(), 5.0) + filter2 = CustomFiltersScoreQuery.Filter(NotFilter(MatchAllFilter()), + script=script1) + filter3 = CustomFiltersScoreQuery.Filter(NotFilter(MatchAllFilter()), + script=script2) + + q1 = MatchAllQuery() + q2 = TermQuery("foo", "bar") + + cfsq1 = CustomFiltersScoreQuery(q1, 
[filter1, filter2]) + self.assertEquals(cfsq1, CustomFiltersScoreQuery(q1, [filter1, filter2])) + self.assertEquals(cfsq1.query, q1) + self.assertEquals(cfsq1.filters, [filter1, filter2]) + self.assertIsNone(cfsq1.score_mode) + self.assertIsNone(cfsq1.params) + self.assertIsNone(cfsq1.lang) + self.assertEquals(cfsq1.serialize(), + {'custom_filters_score': { + 'query': {'match_all': {}}, + 'filters': [ + filter1.serialize(), + filter2.serialize() + ]}}) + + params1 = {"foo": "bar"} + lang1 = "mvel" + cfsq2 = CustomFiltersScoreQuery(q2, [filter1, filter2, filter3], + CustomFiltersScoreQuery.ScoreMode.MAX, + params1, lang1) + self.assertEquals(cfsq2, + CustomFiltersScoreQuery(q2, [filter1, filter2, filter3], + CustomFiltersScoreQuery.ScoreMode.MAX, + params1, lang1)) + self.assertEquals(cfsq2.query, q2) + self.assertEquals(cfsq2.filters, [filter1, filter2, filter3]) + self.assertEquals(cfsq2.score_mode, CustomFiltersScoreQuery.ScoreMode.MAX) + self.assertEquals(cfsq2.params, params1) + self.assertEquals(cfsq2.lang, lang1) + self.assertEquals(cfsq2.serialize(), + {'custom_filters_score': { + 'query': {'term': {'foo': 'bar'}}, + 'filters': [ + filter1.serialize(), + filter2.serialize(), + filter3.serialize() + ], + 'score_mode': 'max', + 'lang': 'mvel', + 'params': {"foo": "bar"}}}) + +if __name__ == "__main__": + unittest.main() diff --git a/src/archivematicaCommon/lib/externals/pyes/tests/test_resultset.py b/src/archivematicaCommon/lib/externals/pyes/tests/test_resultset.py new file mode 100644 index 0000000000..419e31f827 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/tests/test_resultset.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import +import unittest +from .estestcase import ESTestCase +from ..query import MatchAllQuery, Search + +class ResultsetTestCase(ESTestCase): + def setUp(self): + super(ResultsetTestCase, self).setUp() + self.init_default_index() + + for i in xrange(1000): + self.conn.index( + {"name": 
"Joe Tester%d" % i, "parsedtext": "Joe Testere nice guy", "uuid": "11111", "position": i}, + self.index_name, self.document_type, i, bulk=True) + self.conn.refresh(self.index_name) + + def test_iterator(self): + resultset = self.conn.search(Search(MatchAllQuery(), size=20), self.index_name, self.document_type) + self.assertEqual(len([p for p in resultset]), 20) + resultset = self.conn.search(Search(MatchAllQuery(), size=10), self.index_name, self.document_type) + self.assertEqual(len([p for p in resultset[:10]]), 10) + self.assertEqual(resultset[10].uuid, "11111") + self.assertEqual(resultset.total, 1000) + + def test_iterator_offset(self): + # Query for a block of 10, starting at position 10: + # + resultset = self.conn.search(Search(MatchAllQuery(), start=10, size=10, sort={'position': {'order': 'asc'}}), + self.index_name, self.document_type, + start=10, size=10) + + # Ensure that there are 1000 results: + # + self.assertEqual(len(resultset), 1000) + + # Now check that we actually have records 10-19, rather than 0-9: + # + position = 0 + for r in resultset: + self.assertEqual(r.position, position + 10) + position += 1 + +if __name__ == "__main__": + unittest.main() diff --git a/src/archivematicaCommon/lib/externals/pyes/tests/test_rivers.py b/src/archivematicaCommon/lib/externals/pyes/tests/test_rivers.py new file mode 100644 index 0000000000..84f496fbe7 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/tests/test_rivers.py @@ -0,0 +1,67 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import +import unittest +from .estestcase import ESTestCase +from ..rivers import CouchDBRiver, RabbitMQRiver, TwitterRiver + +class RiversTestCase(ESTestCase): + def setUp(self): + super(RiversTestCase, self).setUp() + + def testCreateCouchDBRiver(self): + """ + Testing deleting a river + """ + test_river = CouchDBRiver(index_name='text_index', index_type='test_type') + result = self.conn.create_river(test_river, river_name='test_index') + print result + 
self.assertResultContains(result, {'ok': True}) + + def testDeleteCouchDBRiver(self): + """ + Testing deleting a river + """ + test_river = CouchDBRiver(index_name='text_index', index_type='test_type') + result = self.conn.delete_river(test_river, river_name='test_index') + print result + self.assertResultContains(result, {'ok': True}) + + def testCreateRabbitMQRiver(self): + """ + Testing deleting a river + """ + test_river = RabbitMQRiver(index_name='text_index', index_type='test_type') + result = self.conn.create_river(test_river, river_name='test_index') + print result + self.assertResultContains(result, {'ok': True}) + + def testDeleteRabbitMQRiver(self): + """ + Testing deleting a river + """ + test_river = RabbitMQRiver(index_name='text_index', index_type='test_type') + result = self.conn.delete_river(test_river, river_name='test_index') + print result + self.assertResultContains(result, {'ok': True}) + + def testCreateTwitterRiver(self): + """ + Testing deleting a river + """ + test_river = TwitterRiver('test', 'test', index_name='text_index', index_type='test_type') + result = self.conn.create_river(test_river, river_name='test_index') + print result + self.assertResultContains(result, {'ok': True}) + + def testDeleteTwitterRiver(self): + """ + Testing deleting a river + """ + test_river = TwitterRiver('test', 'test', index_name='text_index', index_type='test_type') + result = self.conn.delete_river(test_river, river_name='test_index') + print result + self.assertResultContains(result, {'ok': True}) + +if __name__ == "__main__": + unittest.main() + diff --git a/src/archivematicaCommon/lib/externals/pyes/tests/test_scriptfields.py b/src/archivematicaCommon/lib/externals/pyes/tests/test_scriptfields.py new file mode 100644 index 0000000000..d2e23e9ed4 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/tests/test_scriptfields.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import +import unittest +from .. 
import scriptfields + +class ScriptFieldsTest(unittest.TestCase): + def test_scriptfieldserror_imported(self): + self.assertTrue(hasattr(scriptfields, 'ScriptFieldsError')) + + +if __name__ == '__main__': + unittest.main() diff --git a/src/archivematicaCommon/lib/externals/pyes/tests/test_serialize.py b/src/archivematicaCommon/lib/externals/pyes/tests/test_serialize.py new file mode 100644 index 0000000000..cf72cf7362 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/tests/test_serialize.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import +import unittest +from .estestcase import ESTestCase +from ..query import TermQuery, RangeQuery +from ..utils import ESRange +from datetime import datetime + +class SerializationTestCase(ESTestCase): + def setUp(self): + super(SerializationTestCase, self).setUp() + mapping = {u'parsedtext': {'boost': 1.0, + 'index': 'analyzed', + 'store': 'yes', + 'type': u'string', + "term_vector": "with_positions_offsets"}, + u'name': {'boost': 1.0, + 'index': 'analyzed', + 'store': 'yes', + 'type': u'string', + "term_vector": "with_positions_offsets"}, + u'title': {'boost': 1.0, + 'index': 'analyzed', + 'store': 'yes', + 'type': u'string', + "term_vector": "with_positions_offsets"}, + u'pos': {'store': 'yes', + 'type': u'integer'}, + u'inserted': {'store': 'yes', + 'type': u'date'}, + u'uuid': {'boost': 1.0, + 'index': 'not_analyzed', + 'store': 'yes', + 'type': u'string'}} + self.conn.create_index(self.index_name) + self.conn.put_mapping(self.document_type, {'properties': mapping}, self.index_name) + self.conn.index({"name": "Joe Tester", "parsedtext": "Joe Testere nice guy", "uuid": "11111", "position": 1, + 'inserted': datetime(2010, 10, 22, 12, 12, 12)}, self.index_name, self.document_type, 1) + self.conn.index({"name": "Bill Baloney", "parsedtext": "Joe Testere nice guy", "uuid": "22222", "position": 2, + 'inserted': datetime(2010, 10, 22, 12, 12, 10)}, self.index_name, self.document_type, 2) + 
self.conn.index({"name": "Jesus H Christ", "parsedtext": "Bible guy", "uuid": "33333", "position": 3, + 'inserted': datetime(1, 1, 1, 0, 0, 0)}, self.index_name, self.document_type, 3) + self.conn.refresh(self.index_name) + + def test_TermQuery(self): + q = TermQuery("name", "joe") + resultset = self.conn.search(query=q, indices=self.index_name) + self.assertEquals(resultset.total, 1) + hit = resultset[0] + self.assertEquals(hit.inserted, datetime(2010, 10, 22, 12, 12, 12)) + + def test_DateBefore1900(self): + q = RangeQuery(ESRange("inserted", datetime(1, 1, 1), datetime(2, 1, 1))) + resultset = self.conn.search(query=q, indices=self.index_name) + self.assertEquals(resultset.total, 1) + hit = resultset[0] + self.assertEquals(hit.inserted, datetime(1, 1, 1, 0, 0, 0)) + + +if __name__ == "__main__": + unittest.main() diff --git a/src/archivematicaCommon/lib/externals/pyes/tests/test_utils.py b/src/archivematicaCommon/lib/externals/pyes/tests/test_utils.py new file mode 100644 index 0000000000..2e632902f7 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/tests/test_utils.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import +import unittest +from .estestcase import ESTestCase +from ..utils import clean_string +from ..es import ES + +class UtilsTestCase(ESTestCase): + def test_cleanstring(self): + self.assertEquals(clean_string("senthil("), "senthil") + self.assertEquals(clean_string("senthil&"), "senthil") + self.assertEquals(clean_string("senthil-"), "senthil") + self.assertEquals(clean_string("senthil:"), "senthil") + + def test_servers(self): + geturls = lambda servers: [server.geturl() for server in servers] + es = ES("127.0.0.1:9200") + self.assertEquals(geturls(es.servers), ["http://127.0.0.1:9200"]) + es = ES("127.0.0.1:9500") + self.assertEquals(geturls(es.servers), ["thrift://127.0.0.1:9500"]) + es = ES(("http", "127.0.0.1", 9400)) + self.assertEquals(geturls(es.servers), ["http://127.0.0.1:9400"]) + es = 
ES(("thrift", "127.0.0.1", 9100)) + self.assertEquals(geturls(es.servers), ["thrift://127.0.0.1:9100"]) + es = ES(["http://127.0.0.1:9100", + "127.0.0.1:9200", + ("thrift", "127.0.0.1", 9000), + "127.0.0.1:9500", + ]) + self.assertEquals(geturls(sorted(es.servers)), + ["http://127.0.0.1:9100", + "http://127.0.0.1:9200", + "thrift://127.0.0.1:9000", + "thrift://127.0.0.1:9500"]) + + +if __name__ == "__main__": + unittest.main() diff --git a/src/archivematicaCommon/lib/externals/pyes/utils.py b/src/archivematicaCommon/lib/externals/pyes/utils.py new file mode 100644 index 0000000000..fcf86cbcc5 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/pyes/utils.py @@ -0,0 +1,105 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import +import base64 + +__all__ = ['clean_string', "ESRange", "ESRangeOp", "string_b64encode", "string_b64decode"] + +def string_b64encode(s): + """ + This function is useful to convert a string to a valid id to be used in ES. + You can use it to generate an ID for urls or some texts + """ + return base64.urlsafe_b64encode(s).strip('=') + + +def string_b64decode(s): + return base64.urlsafe_b64decode(s + '=' * (len(s) % 4)) + +# Characters that are part of Lucene query syntax must be stripped +# from user input: + - && || ! ( ) { } [ ] ^ " ~ * ? : \ +# See: http://lucene.apache.org/java/3_0_2/queryparsersyntax.html#Escaping +SPECIAL_CHARS = [33, 34, 38, 40, 41, 42, 45, 58, 63, 91, 92, 93, 94, 123, 124, 125, 126] +UNI_SPECIAL_CHARS = dict((c, None) for c in SPECIAL_CHARS) +STR_SPECIAL_CHARS = ''.join([chr(c) for c in SPECIAL_CHARS]) + +class EqualityComparableUsingAttributeDictionary(object): + """ + Instances of classes inheriting from this class can be compared + using their attribute dictionary (__dict__). 
See GitHub issue + 128 and http://stackoverflow.com/q/390640 + """ + + def __eq__(self, other): + if type(other) is type(self): + return self.__dict__ == other.__dict__ + return False + + def __ne__(self, other): + return not self == other + + +class ESRange(EqualityComparableUsingAttributeDictionary): + def __init__(self, field, from_value=None, to_value=None, include_lower=None, + include_upper=None, boost=None, **kwargs): + self.field = field + self.from_value = from_value + self.to_value = to_value + self.include_lower = include_lower + self.include_upper = include_upper + self.boost = boost + + def serialize(self): + filters = {} + if self.from_value is not None: + filters['from'] = self.from_value + if self.to_value is not None: + filters['to'] = self.to_value + if self.include_lower is not None: + filters['include_lower'] = self.include_lower + if self.include_upper is not None: + filters['include_upper'] = self.include_upper + if self.boost is not None: + filters['boost'] = self.boost + return self.field, filters + + +class ESRangeOp(ESRange): + def __init__(self, field, op, value, boost=None): + from_value = to_value = include_lower = include_upper = None + if op == "gt": + from_value = value + include_lower = False + elif op == "gte": + from_value = value + include_lower = True + if op == "lt": + to_value = value + include_upper = False + elif op == "lte": + to_value = value + include_upper = True + super(ESRangeOp, self).__init__(field, from_value, to_value, + include_lower, include_upper, boost) + + +def clean_string(text): + """ + Remove Lucene reserved characters from query string + """ + if isinstance(text, unicode): + return text.translate(UNI_SPECIAL_CHARS).strip() + return text.translate(None, STR_SPECIAL_CHARS).strip() + + +def keys_to_string(data): + """ + Function to convert all the unicode keys in string keys + """ + if isinstance(data, dict): + for key in list(data.keys()): + if isinstance(key, unicode): + value = data[key] + val = 
keys_to_string(value) + del data[key] + data[key.encode("utf8", "ignore")] = val + return data diff --git a/src/archivematicaCommon/lib/externals/requests/__init__.py b/src/archivematicaCommon/lib/externals/requests/__init__.py new file mode 100644 index 0000000000..e09c834591 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/requests/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- + +# __ +# /__) _ _ _ _ _/ _ +# / ( (- (/ (/ (- _) / _) +# / + +""" +requests +~~~~~~~~ + +:copyright: (c) 2011 by Kenneth Reitz. +:license: ISC, see LICENSE for more details. + +""" + +__title__ = 'requests' +__version__ = '0.8.3' +__build__ = 0x000803 +__author__ = 'Kenneth Reitz' +__license__ = 'ISC' +__copyright__ = 'Copyright 2011 Kenneth Reitz' + + +from . import utils +from .models import Request, Response +from .api import request, get, head, post, patch, put, delete, options +from .sessions import session, Session +from .status_codes import codes +from .exceptions import ( + RequestException, Timeout, URLRequired, + TooManyRedirects, HTTPError, ConnectionError +) diff --git a/src/archivematicaCommon/lib/externals/requests/api.py b/src/archivematicaCommon/lib/externals/requests/api.py new file mode 100644 index 0000000000..0b124e0312 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/requests/api.py @@ -0,0 +1,145 @@ +# -*- coding: utf-8 -*- + +""" +requests.api +~~~~~~~~~~~~ + +This module implements the Requests API. + +:copyright: (c) 2011 by Kenneth Reitz. +:license: ISC, see LICENSE for more details. + +""" + +from . import sessions + + +def request(method, url, + params=None, + data=None, + headers=None, + cookies=None, + files=None, + auth=None, + timeout=None, + allow_redirects=False, + proxies=None, + hooks=None, + return_response=True, + prefetch=False, + session=None, + config=None): + """Constructs and sends a :class:`Request `. + Returns :class:`Response ` object. + + :param method: method for the new :class:`Request` object. 
+ :param url: URL for the new :class:`Request` object. + :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`. + :param data: (optional) Dictionary or bytes to send in the body of the :class:`Request`. + :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`. + :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`. + :param files: (optional) Dictionary of 'name': file-like-objects (or {'name': ('filename', fileobj)}) for multipart encoding upload. + :param auth: (optional) Auth typle to enable Basic/Digest/Custom HTTP Auth. + :param timeout: (optional) Float describing the timeout of the request. + :param allow_redirects: (optional) Boolean. Set to True if POST/PUT/DELETE redirect following is allowed. + :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy. + :param return_response: (optional) If False, an un-sent Request object will returned. + :param session: (optional) A :class:`Session` object to be used for the request. + :param config: (optional) A configuration dictionary. + """ + + s = session or sessions.session() + return s.request( + method=method, + url=url, + params=params, + data=data, + headers=headers, + cookies=cookies, + files=files, + auth=auth, + timeout=timeout, + allow_redirects=allow_redirects, + proxies=proxies, + hooks=hooks, + return_response=return_response, + config=config, + prefetch=prefetch + ) + + + +def get(url, **kwargs): + """Sends a GET request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param **kwargs: Optional arguments that ``request`` takes. + """ + + kwargs.setdefault('allow_redirects', True) + return request('get', url, **kwargs) + + +def options(url, **kwargs): + """Sends a OPTIONS request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. 
+ :param **kwargs: Optional arguments that ``request`` takes. + """ + + kwargs.setdefault('allow_redirects', True) + return request('options', url, **kwargs) + + +def head(url, **kwargs): + """Sends a HEAD request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param **kwargs: Optional arguments that ``request`` takes. + """ + + kwargs.setdefault('allow_redirects', True) + return request('head', url, **kwargs) + + +def post(url, data=None, **kwargs): + """Sends a POST request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary or bytes to send in the body of the :class:`Request`. + :param **kwargs: Optional arguments that ``request`` takes. + """ + + return request('post', url, data=data, **kwargs) + + +def put(url, data=None, **kwargs): + """Sends a PUT request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary or bytes to send in the body of the :class:`Request`. + :param **kwargs: Optional arguments that ``request`` takes. + """ + + return request('put', url, data=data, **kwargs) + + +def patch(url, data=None, **kwargs): + """Sends a PATCH request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary or bytes to send in the body of the :class:`Request`. + :param **kwargs: Optional arguments that ``request`` takes. + """ + + return request('patch', url, data=data, **kwargs) + + +def delete(url, **kwargs): + """Sends a DELETE request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param **kwargs: Optional arguments that ``request`` takes. 
+ """ + + return request('delete', url, **kwargs) diff --git a/src/archivematicaCommon/lib/externals/requests/async.py b/src/archivematicaCommon/lib/externals/requests/async.py new file mode 100644 index 0000000000..83a8286a09 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/requests/async.py @@ -0,0 +1,87 @@ +# -*- coding: utf-8 -*- + +""" +requests.async +~~~~~~~~~~~~~~ + +This module contains an asynchronous replica of ``requests.api``, powered +by gevent. All API methods return a ``Request`` instance (as opposed to +``Response``). A list of requests can be sent with ``map()``. +""" + +try: + import gevent + from gevent import monkey as curious_george + from gevent.pool import Pool +except ImportError: + raise RuntimeError('Gevent is required for requests.async.') + +# Monkey-patch. +curious_george.patch_all(thread=False) + +from . import api + + +__all__ = ( + 'map', + 'get', 'options', 'head', 'post', 'put', 'patch', 'delete', 'request' +) + + +def patched(f): + """Patches a given API function to not send.""" + + def wrapped(*args, **kwargs): + + kwargs['return_response'] = False + kwargs['prefetch'] = True + + return f(*args, **kwargs) + + return wrapped + + +def send(r, pools=None): + """Sends a given Request object.""" + + if pools: + r._pools = pools + + r.send() + + return r.response + + +# Patched requests.api functions. +get = patched(api.get) +options = patched(api.options) +head = patched(api.head) +post = patched(api.post) +put = patched(api.put) +patch = patched(api.patch) +delete = patched(api.delete) +request = patched(api.request) + + +def map(requests, prefetch=True, size=None): + """Concurrently converts a list of Requests to Responses. + + :param requests: a collection of Request objects. + :param prefetch: If False, the content will not be downloaded immediately. + :param size: Specifies the number of requests to make at a time. If None, no throttling occurs. 
+ """ + + requests = list(requests) + + if size: + pool = Pool(size) + pool.map(send, requests) + pool.join() + else: + jobs = [gevent.spawn(send, r) for r in requests] + gevent.joinall(jobs) + + if prefetch: + [r.response.content for r in requests] + + return [r.response for r in requests] diff --git a/src/archivematicaCommon/lib/externals/requests/auth.py b/src/archivematicaCommon/lib/externals/requests/auth.py new file mode 100644 index 0000000000..fad6eb7969 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/requests/auth.py @@ -0,0 +1,124 @@ +# -*- coding: utf-8 -*- + +""" +requests.auth +~~~~~~~~~~~~~ + +This module contains the authentication handlers for Requests. +""" + +import time +import hashlib + +from base64 import b64encode +from urlparse import urlparse + +from .utils import randombytes, parse_dict_header + + +class AuthBase(object): + """Base class that all auth implementations derive from""" + + def __call__(self, r): + raise NotImplementedError('Auth hooks must be callable.') + + +class HTTPBasicAuth(AuthBase): + """Attaches HTTP Basic Authentication to the given Request object.""" + def __init__(self, username, password): + self.username = str(username) + self.password = str(password) + + def __call__(self, r): + auth_s = b64encode('%s:%s' % (self.username, self.password)) + r.headers['Authorization'] = ('Basic %s' % auth_s) + return r + + +class HTTPDigestAuth(AuthBase): + """Attaches HTTP Digest Authentication to the given Request object.""" + def __init__(self, username, password): + self.username = username + self.password = password + + def handle_401(self, r): + """Takes the given response and tries digest-auth, if needed.""" + + s_auth = r.headers.get('www-authenticate', '') + + if 'digest' in s_auth.lower(): + + last_nonce = '' + nonce_count = 0 + + chal = parse_dict_header(s_auth.replace('Digest ', '')) + + realm = chal['realm'] + nonce = chal['nonce'] + qop = chal.get('qop') + algorithm = chal.get('algorithm', 'MD5') + opaque = 
chal.get('opaque', None) + + algorithm = algorithm.upper() + # lambdas assume digest modules are imported at the top level + if algorithm == 'MD5': + H = lambda x: hashlib.md5(x).hexdigest() + elif algorithm == 'SHA': + H = lambda x: hashlib.sha1(x).hexdigest() + # XXX MD5-sess + KD = lambda s, d: H("%s:%s" % (s, d)) + + if H is None: + return None + + # XXX not implemented yet + entdig = None + p_parsed = urlparse(r.request.url) + path = p_parsed.path + p_parsed.query + + A1 = "%s:%s:%s" % (self.username, realm, self.password) + A2 = "%s:%s" % (r.request.method, path) + + if qop == 'auth': + if nonce == last_nonce: + nonce_count += 1 + else: + nonce_count = 1 + last_nonce = nonce + + ncvalue = '%08x' % nonce_count + cnonce = (hashlib.sha1("%s:%s:%s:%s" % ( + nonce_count, nonce, time.ctime(), randombytes(8))) + .hexdigest()[:16] + ) + noncebit = "%s:%s:%s:%s:%s" % (nonce, ncvalue, cnonce, qop, H(A2)) + respdig = KD(H(A1), noncebit) + elif qop is None: + respdig = KD(H(A1), "%s:%s" % (nonce, H(A2))) + else: + # XXX handle auth-int. + return None + + # XXX should the partial digests be encoded too? 
+ base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \ + 'response="%s"' % (self.username, realm, nonce, path, respdig) + if opaque: + base += ', opaque="%s"' % opaque + if entdig: + base += ', digest="%s"' % entdig + base += ', algorithm="%s"' % algorithm + if qop: + base += ', qop=auth, nc=%s, cnonce="%s"' % (ncvalue, cnonce) + + r.request.headers['Authorization'] = 'Digest %s' % (base) + r.request.send(anyway=True) + _r = r.request.response + _r.history.append(r) + + return _r + + return r + + def __call__(self, r): + r.hooks['response'] = self.handle_401 + return r diff --git a/src/archivematicaCommon/lib/externals/requests/defaults.py b/src/archivematicaCommon/lib/externals/requests/defaults.py new file mode 100644 index 0000000000..7a5a3fb8df --- /dev/null +++ b/src/archivematicaCommon/lib/externals/requests/defaults.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- + +""" +requests.defaults +~~~~~~~~~~~~~~~~~ + +This module provides the Requests configuration defaults. + +Configurations: + +:base_headers: Default HTTP headers. +:verbose: Stream to write request logging to. +:timeout: Seconds until request timeout. +:max_redirects: Maximum njumber of redirects allowed within a request. +:decode_unicode: Decode unicode responses automatically? +:keep_alive: Reuse HTTP Connections? +:max_retries: The number of times a request should be retried in the event of a connection failure. +:safe_mode: If true, Requests will catch all errors. +:pool_maxsize: The maximium size of an HTTP connection pool. +:pool_connections: The number of active HTTP connection pools to use. + +""" + +from . 
import __version__ + +defaults = dict() + + +defaults['base_headers'] = { + 'User-Agent': 'python-requests/%s' % __version__, + 'Accept-Encoding': ', '.join(('identity', 'deflate', 'compress', 'gzip')), + 'Accept': '*/*' +} + +defaults['verbose'] = None +defaults['max_redirects'] = 30 +defaults['decode_unicode'] = True +defaults['pool_connections'] = 10 +defaults['pool_maxsize'] = 10 +defaults['max_retries'] = 0 +defaults['safe_mode'] = False +defaults['keep_alive'] = True diff --git a/src/archivematicaCommon/lib/externals/requests/exceptions.py b/src/archivematicaCommon/lib/externals/requests/exceptions.py new file mode 100644 index 0000000000..d20a95cd64 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/requests/exceptions.py @@ -0,0 +1,28 @@ +# -*- coding: utf-8 -*- + +""" +requests.exceptions +~~~~~~~~~~~~~~~~~~~ + +This module contains the set of Requests' exceptions. + +""" + +class RequestException(Exception): + """There was an ambiguous exception that occurred while handling your + request.""" + +class HTTPError(RequestException): + """An HTTP error occured.""" + +class ConnectionError(RequestException): + """A Connection error occured.""" + +class Timeout(RequestException): + """The request timed out.""" + +class URLRequired(RequestException): + """A valid URL is required to make a request.""" + +class TooManyRedirects(RequestException): + """Too many redirects.""" diff --git a/src/archivematicaCommon/lib/externals/requests/hooks.py b/src/archivematicaCommon/lib/externals/requests/hooks.py new file mode 100644 index 0000000000..f9cf4808be --- /dev/null +++ b/src/archivematicaCommon/lib/externals/requests/hooks.py @@ -0,0 +1,40 @@ +# -*- coding: utf-8 -*- + +""" +requests.hooks +~~~~~~~~~~~~~~ + +This module provides the capabilities for the Requests hooks system. + +Available hooks: + +``args``: + A dictionary of the arguments being sent to Request(). + +``pre_request``: + The Request object, directly before being sent. 
+ +``post_request``: + The Request object, directly after being sent. + +``response``: + The response generated from a Request. + +""" + +import warnings + + +def dispatch_hook(key, hooks, hook_data): + """Dispatches a hook dictionary on a given piece of data.""" + + hooks = hooks or dict() + + if key in hooks: + try: + return hooks.get(key).__call__(hook_data) or hook_data + + except Exception, why: + warnings.warn(str(why)) + + return hook_data diff --git a/src/archivematicaCommon/lib/externals/requests/models.py b/src/archivematicaCommon/lib/externals/requests/models.py new file mode 100644 index 0000000000..8b18cd84f0 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/requests/models.py @@ -0,0 +1,634 @@ +# -*- coding: utf-8 -*- + +""" +requests.models +~~~~~~~~~~~~~~~ + +This module contains the primary objects that power Requests. +""" + +import urllib +import zlib + +from urlparse import urlparse, urlunparse, urljoin, urlsplit +from datetime import datetime + +from .hooks import dispatch_hook +from .structures import CaseInsensitiveDict +from .status_codes import codes +from .packages import oreos +from .auth import HTTPBasicAuth +from .packages.urllib3.exceptions import MaxRetryError +from .packages.urllib3.exceptions import SSLError as _SSLError +from .packages.urllib3.exceptions import HTTPError as _HTTPError +from .packages.urllib3 import connectionpool, poolmanager +from .packages.urllib3.filepost import encode_multipart_formdata +from .exceptions import ( + Timeout, URLRequired, TooManyRedirects, HTTPError, ConnectionError) +from .utils import ( + get_encoding_from_headers, stream_decode_response_unicode, + decode_gzip, stream_decode_gzip, guess_filename, requote_path) + + +REDIRECT_STATI = (codes.moved, codes.found, codes.other, codes.temporary_moved) + + + +class Request(object): + """The :class:`Request ` object. It carries out all functionality of + Requests. Recommended interface is with the Requests functions. 
+ """ + + def __init__(self, + url=None, + headers=dict(), + files=None, + method=None, + data=dict(), + params=dict(), + auth=None, + cookies=None, + timeout=None, + redirect=False, + allow_redirects=False, + proxies=None, + hooks=None, + config=None, + _poolmanager=None): + + #: Float describes the timeout of the request. + # (Use socket.setdefaulttimeout() as fallback) + self.timeout = timeout + + #: Request URL. + self.url = url + + #: Dictionary of HTTP Headers to attach to the :class:`Request `. + self.headers = dict(headers or []) + + #: Dictionary of files to multipart upload (``{filename: content}``). + self.files = files + + #: HTTP Method to use. + self.method = method + + #: Dictionary or byte of request body data to attach to the + #: :class:`Request `. + self.data = None + + #: Dictionary or byte of querystring data to attach to the + #: :class:`Request `. + self.params = None + self.params = dict(params or []) + + #: True if :class:`Request ` is part of a redirect chain (disables history + #: and HTTPError storage). + self.redirect = redirect + + #: Set to True if full redirects are allowed (e.g. re-POST-ing of data at new ``Location``) + self.allow_redirects = allow_redirects + + # Dictionary mapping protocol to the URL of the proxy (e.g. {'http': 'foo.bar:3128'}) + self.proxies = dict(proxies or []) + + self.data, self._enc_data = self._encode_params(data) + self.params, self._enc_params = self._encode_params(params) + + #: :class:`Response ` instance, containing + #: content and metadata of HTTP Response, once :attr:`sent `. + self.response = Response() + + #: Authentication tuple or object to attach to :class:`Request `. + self.auth = auth + + #: CookieJar to attach to :class:`Request `. + self.cookies = dict(cookies or []) + + #: Dictionary of configurations for this request. + self.config = dict(config or []) + + #: True if Request has been sent. + self.sent = False + + #: Event-handling hooks. + self.hooks = hooks + + #: Session. 
+ self.session = None + + if headers: + headers = CaseInsensitiveDict(self.headers) + else: + headers = CaseInsensitiveDict() + + for (k, v) in self.config.get('base_headers', {}).items(): + if k not in headers: + headers[k] = v + + self.headers = headers + self._poolmanager = _poolmanager + + # Pre-request hook. + r = dispatch_hook('pre_request', hooks, self) + self.__dict__.update(r.__dict__) + + + def __repr__(self): + return '' % (self.method) + + + def _build_response(self, resp, is_error=False): + """Build internal :class:`Response ` object + from given response. + """ + + def build(resp): + + response = Response() + + # Pass settings over. + response.config = self.config + + if resp: + + # Fallback to None if there's no staus_code, for whatever reason. + response.status_code = getattr(resp, 'status', None) + + # Make headers case-insensitive. + response.headers = CaseInsensitiveDict(getattr(resp, 'headers', None)) + + # Set encoding. + response.encoding = get_encoding_from_headers(response.headers) + + # Start off with our local cookies. + cookies = self.cookies or dict() + + # Add new cookies from the server. + if 'set-cookie' in response.headers: + cookie_header = response.headers['set-cookie'] + cookies = oreos.dict_from_string(cookie_header) + + # Save cookies in Response. + response.cookies = cookies + + # Save original resopnse for later. 
+ response.raw = resp + + if is_error: + response.error = resp + + response.url = self.full_url + + return response + + history = [] + + r = build(resp) + cookies = self.cookies + self.cookies.update(r.cookies) + + if r.status_code in REDIRECT_STATI and not self.redirect: + + while ( + ('location' in r.headers) and + ((r.status_code is codes.see_other) or (self.allow_redirects)) + ): + + if not len(history) < self.config.get('max_redirects'): + raise TooManyRedirects() + + history.append(r) + + url = r.headers['location'] + + # Handle redirection without scheme (see: RFC 1808 Section 4) + if url.startswith('//'): + parsed_rurl = urlparse(r.url) + url = '%s:%s' % (parsed_rurl.scheme, url) + + # Facilitate non-RFC2616-compliant 'location' headers + # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource') + if not urlparse(url).netloc: + url = urljoin(r.url, url) + + # http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.3.4 + if r.status_code is codes.see_other: + method = 'GET' + else: + method = self.method + + # Remove the cookie headers that were sent. + headers = self.headers + try: + del headers['Cookie'] + except KeyError: + pass + + request = Request( + url=url, + headers=headers, + files=self.files, + method=method, + params=self.session.params, + auth=self.auth, + cookies=cookies, + redirect=True, + config=self.config, + timeout=self.timeout, + _poolmanager=self._poolmanager, + proxies = self.proxies, + ) + + request.send() + cookies.update(request.response.cookies) + r = request.response + self.cookies.update(r.cookies) + + r.history = history + + self.response = r + self.response.request = self + self.response.cookies.update(self.cookies) + + + @staticmethod + def _encode_params(data): + """Encode parameters in a piece of data. + + If the data supplied is a dictionary, encodes each parameter in it, and + returns a list of tuples containing the encoded parameters, and a urlencoded + version of that. 
+ + Otherwise, assumes the data is already encoded appropriately, and + returns it twice. + """ + + if hasattr(data, '__iter__'): + data = dict(data) + + if hasattr(data, 'items'): + result = [] + for k, vs in data.items(): + for v in isinstance(vs, list) and vs or [vs]: + result.append((k.encode('utf-8') if isinstance(k, unicode) else k, + v.encode('utf-8') if isinstance(v, unicode) else v)) + return result, urllib.urlencode(result, doseq=True) + else: + return data, data + + @property + def full_url(self): + """Build the actual URL to use.""" + + if not self.url: + raise URLRequired() + + # Support for unicode domain names and paths. + scheme, netloc, path, params, query, fragment = urlparse(self.url) + + if not scheme: + raise ValueError() + + netloc = netloc.encode('idna') + + if isinstance(path, unicode): + path = path.encode('utf-8') + + path = requote_path(path) + + url = str(urlunparse([ scheme, netloc, path, params, query, fragment ])) + + if self._enc_params: + if urlparse(url).query: + return '%s&%s' % (url, self._enc_params) + else: + return '%s?%s' % (url, self._enc_params) + else: + return url + + @property + def path_url(self): + """Build the path URL to use.""" + + url = [] + + p = urlsplit(self.full_url) + + # Proxies use full URLs. + if p.scheme in self.proxies: + return self.full_url + + path = p.path + if not path: + path = '/' + url.append(path) + + query = p.query + if query: + url.append('?') + url.append(query) + + return ''.join(url) + + + + def send(self, anyway=False, prefetch=False): + """Sends the request. Returns True of successful, false if not. + If there was an HTTPError during transmission, + self.response.status_code will contain the HTTPError code. + + Once a request is successfully sent, `sent` will equal True. + + :param anyway: If True, request will be sent, even if it has + already been sent. 
+ """ + + # Build the URL + url = self.full_url + + # Logging + if self.config.get('verbose'): + self.config.get('verbose').write('%s %s %s\n' % ( + datetime.now().isoformat(), self.method, url + )) + + # Nottin' on you. + body = None + content_type = None + + # Multi-part file uploads. + if self.files: + if not isinstance(self.data, basestring): + + try: + fields = self.data.copy() + except AttributeError: + fields = dict(self.data) + + for (k, v) in self.files.items(): + # support for explicit filename + if isinstance(v, (tuple, list)): + fn, fp = v + else: + fn = guess_filename(v) or k + fp = v + fields.update({k: (fn, fp.read())}) + + (body, content_type) = encode_multipart_formdata(fields) + else: + pass + # TODO: Conflict? + else: + if self.data: + + body = self._enc_data + if isinstance(self.data, basestring): + content_type = None + else: + content_type = 'application/x-www-form-urlencoded' + + # Add content-type if it wasn't explicitly provided. + if (content_type) and (not 'content-type' in self.headers): + self.headers['Content-Type'] = content_type + + if self.auth: + if isinstance(self.auth, tuple) and len(self.auth) == 2: + # special-case basic HTTP auth + self.auth = HTTPBasicAuth(*self.auth) + + # Allow auth to make its changes. + r = self.auth(self) + + # Update self to reflect the auth changes. + self.__dict__.update(r.__dict__) + + _p = urlparse(url) + proxy = self.proxies.get(_p.scheme) + + if proxy: + conn = poolmanager.proxy_from_url(proxy) + else: + # Check to see if keep_alive is allowed. + if self.config.get('keep_alive'): + conn = self._poolmanager.connection_from_url(url) + else: + conn = connectionpool.connection_from_url(url) + + if not self.sent or anyway: + + if self.cookies: + + # Skip if 'cookie' header is explicitly set. + if 'cookie' not in self.headers: + + # Simple cookie with our dict. + c = oreos.monkeys.SimpleCookie() + for (k, v) in self.cookies.items(): + c[k] = v + + # Turn it into a header. 
+ cookie_header = c.output(header='', sep='; ').strip() + + # Attach Cookie header to request. + self.headers['Cookie'] = cookie_header + + try: + # Send the request. + r = conn.urlopen( + method=self.method, + url=self.path_url, + body=body, + headers=self.headers, + redirect=False, + assert_same_host=False, + preload_content=prefetch, + decode_content=False, + retries=self.config.get('max_retries', 0), + timeout=self.timeout, + ) + self.sent = True + + + except MaxRetryError, e: + if not self.config.get('safe_mode', False): + raise ConnectionError(e) + else: + r = None + + except (_SSLError, _HTTPError), e: + if not self.config.get('safe_mode', False): + raise Timeout('Request timed out.') + + self._build_response(r) + + # Response manipulation hook. + self.response = dispatch_hook('response', self.hooks, self.response) + + # Post-request hook. + r = dispatch_hook('post_request', self.hooks, self) + self.__dict__.update(r.__dict__) + + # If prefetch is True, mark content as consumed. + if prefetch: + self.response._content_consumed = True + + return self.sent + + +class Response(object): + """The core :class:`Response ` object. All + :class:`Request ` objects contain a + :class:`response ` attribute, which is an instance + of this class. + """ + + def __init__(self): + + self._content = None + self._content_consumed = False + + #: Integer Code of responded HTTP Status. + self.status_code = None + + #: Case-insensitive Dictionary of Response Headers. + #: For example, ``headers['content-encoding']`` will return the + #: value of a ``'Content-Encoding'`` response header. + self.headers = CaseInsensitiveDict() + + #: File-like object representation of response (for advanced usage). + self.raw = None + + #: Final URL location of Response. + self.url = None + + #: Resulting :class:`HTTPError` of request, if one occurred. + self.error = None + + #: Encoding to decode with when accessing r.content. 
+ self.encoding = None + + #: A list of :class:`Response ` objects from + #: the history of the Request. Any redirect responses will end + #: up here. + self.history = [] + + #: The :class:`Request ` that created the Response. + self.request = None + + #: A dictionary of Cookies the server sent back. + self.cookies = {} + + #: Dictionary of configurations for this request. + self.config = {} + + + def __repr__(self): + return '' % (self.status_code) + + def __nonzero__(self): + """Returns true if :attr:`status_code` is 'OK'.""" + return self.ok + + @property + def ok(self): + try: + self.raise_for_status() + except HTTPError: + return False + return True + + + def iter_content(self, chunk_size=10 * 1024, decode_unicode=None): + """Iterates over the response data. This avoids reading the content + at once into memory for large responses. The chunk size is the number + of bytes it should read into memory. This is not necessarily the + length of each item returned as decoding can take place. + """ + if self._content_consumed: + raise RuntimeError( + 'The content for this response was already consumed' + ) + + def generate(): + while 1: + chunk = self.raw.read(chunk_size) + if not chunk: + break + yield chunk + self._content_consumed = True + + gen = generate() + + if 'gzip' in self.headers.get('content-encoding', ''): + gen = stream_decode_gzip(gen) + + if decode_unicode is None: + decode_unicode = self.config.get('decode_unicode') + + if decode_unicode: + gen = stream_decode_response_unicode(gen, self) + + return gen + + + @property + def content(self): + """Content of the response, in bytes or unicode + (if available). + """ + + if self._content is None: + # Read the contents. + try: + if self._content_consumed: + raise RuntimeError( + 'The content for this response was already consumed') + + self._content = self.raw.read() + except AttributeError: + self._content = None + + content = self._content + + # Decode GZip'd content. 
+ if 'gzip' in self.headers.get('content-encoding', ''): + try: + content = decode_gzip(self._content) + except zlib.error: + pass + + # Decode unicode content. + if self.config.get('decode_unicode'): + + # Try charset from content-type + + if self.encoding: + try: + content = unicode(content, self.encoding) + except UnicodeError: + pass + + # Fall back: + try: + content = unicode(content, self.encoding, errors='replace') + except TypeError: + pass + + self._content_consumed = True + return content + + + def raise_for_status(self): + """Raises stored :class:`HTTPError` or :class:`URLError`, if one occurred.""" + + if self.error: + raise self.error + + if (self.status_code >= 300) and (self.status_code < 400): + raise HTTPError('%s Redirection' % self.status_code) + + elif (self.status_code >= 400) and (self.status_code < 500): + raise HTTPError('%s Client Error' % self.status_code) + + elif (self.status_code >= 500) and (self.status_code < 600): + raise HTTPError('%s Server Error' % self.status_code) diff --git a/src/archivematicaCommon/lib/externals/requests/packages/__init__.py b/src/archivematicaCommon/lib/externals/requests/packages/__init__.py new file mode 100644 index 0000000000..d62c4b7111 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/requests/packages/__init__.py @@ -0,0 +1,3 @@ +from __future__ import absolute_import + +from . 
import urllib3 diff --git a/src/archivematicaCommon/lib/externals/requests/packages/oreos/__init__.py b/src/archivematicaCommon/lib/externals/requests/packages/oreos/__init__.py new file mode 100644 index 0000000000..e0038b3aed --- /dev/null +++ b/src/archivematicaCommon/lib/externals/requests/packages/oreos/__init__.py @@ -0,0 +1,3 @@ +# -*- coding: utf-8 -*- + +from .core import dict_from_string diff --git a/src/archivematicaCommon/lib/externals/requests/packages/oreos/core.py b/src/archivematicaCommon/lib/externals/requests/packages/oreos/core.py new file mode 100644 index 0000000000..4e0e5d3fd5 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/requests/packages/oreos/core.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- + +""" +oreos.core +~~~~~~~~~~ + +The creamy white center. +""" + +from .monkeys import SimpleCookie + + +def dict_from_string(s): + """Returns a MultiDict with Cookies.""" + + cookies = dict() + + c = SimpleCookie() + c.load(s) + + for k,v in c.items(): + cookies.update({k: v.value}) + + return cookies diff --git a/src/archivematicaCommon/lib/externals/requests/packages/oreos/monkeys.py b/src/archivematicaCommon/lib/externals/requests/packages/oreos/monkeys.py new file mode 100644 index 0000000000..6be3074c44 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/requests/packages/oreos/monkeys.py @@ -0,0 +1,770 @@ +# -*- coding: utf-8 -*- + +""" +oreos.monkeys +~~~~~~~~~~~~~ + +Monkeypatches. 
+""" +#!/usr/bin/env python +# + +#### +# Copyright 2000 by Timothy O'Malley +# +# All Rights Reserved +# +# Permission to use, copy, modify, and distribute this software +# and its documentation for any purpose and without fee is hereby +# granted, provided that the above copyright notice appear in all +# copies and that both that copyright notice and this permission +# notice appear in supporting documentation, and that the name of +# Timothy O'Malley not be used in advertising or publicity +# pertaining to distribution of the software without specific, written +# prior permission. +# +# Timothy O'Malley DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS +# SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS, IN NO EVENT SHALL Timothy O'Malley BE LIABLE FOR +# ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS +# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. +# +#### +# +# Id: Cookie.py,v 2.29 2000/08/23 05:28:49 timo Exp +# by Timothy O'Malley +# +# Cookie.py is a Python module for the handling of HTTP +# cookies as a Python dictionary. See RFC 2109 for more +# information on cookies. +# +# The original idea to treat Cookies as a dictionary came from +# Dave Mitchell (davem@magnet.com) in 1995, when he released the +# first version of nscookie.py. +# +#### + +r""" +Here's a sample session to show how to use this module. +At the moment, this is the only documentation. + +The Basics +---------- + +Importing is easy.. + + >>> import Cookie + +Most of the time you start by creating a cookie. Cookies come in +three flavors, each with slightly different encoding semantics, but +more on that later. 
+ + >>> C = Cookie.SimpleCookie() + >>> C = Cookie.SerialCookie() + >>> C = Cookie.SmartCookie() + +[Note: Long-time users of Cookie.py will remember using +Cookie.Cookie() to create an Cookie object. Although deprecated, it +is still supported by the code. See the Backward Compatibility notes +for more information.] + +Once you've created your Cookie, you can add values just as if it were +a dictionary. + + >>> C = Cookie.SmartCookie() + >>> C["fig"] = "newton" + >>> C["sugar"] = "wafer" + >>> C.output() + 'Set-Cookie: fig=newton\r\nSet-Cookie: sugar=wafer' + +Notice that the printable representation of a Cookie is the +appropriate format for a Set-Cookie: header. This is the +default behavior. You can change the header and printed +attributes by using the .output() function + + >>> C = Cookie.SmartCookie() + >>> C["rocky"] = "road" + >>> C["rocky"]["path"] = "/cookie" + >>> print C.output(header="Cookie:") + Cookie: rocky=road; Path=/cookie + >>> print C.output(attrs=[], header="Cookie:") + Cookie: rocky=road + +The load() method of a Cookie extracts cookies from a string. In a +CGI script, you would use this method to extract the cookies from the +HTTP_COOKIE environment variable. + + >>> C = Cookie.SmartCookie() + >>> C.load("chips=ahoy; vienna=finger") + >>> C.output() + 'Set-Cookie: chips=ahoy\r\nSet-Cookie: vienna=finger' + +The load() method is darn-tootin smart about identifying cookies +within a string. Escaped quotation marks, nested semicolons, and other +such trickeries do not confuse it. + + >>> C = Cookie.SmartCookie() + >>> C.load('keebler="E=everybody; L=\\"Loves\\"; fudge=\\012;";') + >>> print C + Set-Cookie: keebler="E=everybody; L=\"Loves\"; fudge=\012;" + +Each element of the Cookie also supports all of the RFC 2109 +Cookie attributes. Here's an example which sets the Path +attribute. 
+ + >>> C = Cookie.SmartCookie() + >>> C["oreo"] = "doublestuff" + >>> C["oreo"]["path"] = "/" + >>> print C + Set-Cookie: oreo=doublestuff; Path=/ + +Each dictionary element has a 'value' attribute, which gives you +back the value associated with the key. + + >>> C = Cookie.SmartCookie() + >>> C["twix"] = "none for you" + >>> C["twix"].value + 'none for you' + + +A Bit More Advanced +------------------- + +As mentioned before, there are three different flavors of Cookie +objects, each with different encoding/decoding semantics. This +section briefly discusses the differences. + +SimpleCookie + +The SimpleCookie expects that all values should be standard strings. +Just to be sure, SimpleCookie invokes the str() builtin to convert +the value to a string, when the values are set dictionary-style. + + >>> C = Cookie.SimpleCookie() + >>> C["number"] = 7 + >>> C["string"] = "seven" + >>> C["number"].value + '7' + >>> C["string"].value + 'seven' + >>> C.output() + 'Set-Cookie: number=7\r\nSet-Cookie: string=seven' + + +SerialCookie + +The SerialCookie expects that all values should be serialized using +cPickle (or pickle, if cPickle isn't available). As a result of +serializing, SerialCookie can save almost any Python object to a +value, and recover the exact same object when the cookie has been +returned. (SerialCookie can yield some strange-looking cookie +values, however.) + + >>> C = Cookie.SerialCookie() + >>> C["number"] = 7 + >>> C["string"] = "seven" + >>> C["number"].value + 7 + >>> C["string"].value + 'seven' + >>> C.output() + 'Set-Cookie: number="I7\\012."\r\nSet-Cookie: string="S\'seven\'\\012p1\\012."' + +Be warned, however, if SerialCookie cannot de-serialize a value (because +it isn't a valid pickle'd object), IT WILL RAISE AN EXCEPTION. + + +SmartCookie + +The SmartCookie combines aspects of each of the other two flavors. 
+When setting a value in a dictionary-fashion, the SmartCookie will +serialize (ala cPickle) the value *if and only if* it isn't a +Python string. String objects are *not* serialized. Similarly, +when the load() method parses out values, it attempts to de-serialize +the value. If it fails, then it fallsback to treating the value +as a string. + + >>> C = Cookie.SmartCookie() + >>> C["number"] = 7 + >>> C["string"] = "seven" + >>> C["number"].value + 7 + >>> C["string"].value + 'seven' + >>> C.output() + 'Set-Cookie: number="I7\\012."\r\nSet-Cookie: string=seven' + + +Backwards Compatibility +----------------------- + +In order to keep compatibilty with earlier versions of Cookie.py, +it is still possible to use Cookie.Cookie() to create a Cookie. In +fact, this simply returns a SmartCookie. + + >>> C = Cookie.Cookie() + >>> print C.__class__.__name__ + SmartCookie + + +Finis. +""" #" +# ^ +# |----helps out font-lock + +# +# Import our required modules +# +import string + +try: + from cPickle import dumps, loads +except ImportError: + from pickle import dumps, loads + +import re, warnings + +__all__ = ["CookieError","BaseCookie","SimpleCookie","SerialCookie", + "SmartCookie","Cookie"] + +_nulljoin = ''.join +_semispacejoin = '; '.join +_spacejoin = ' '.join + +# +# Define an exception visible to External modules +# +class CookieError(Exception): + pass + + +# These quoting routines conform to the RFC2109 specification, which in +# turn references the character definitions from RFC2068. They provide +# a two-way quoting algorithm. Any non-text character is translated +# into a 4 character sequence: a forward-slash followed by the +# three-digit octal equivalent of the character. Any '\' or '"' is +# quoted with a preceeding '\' slash. +# +# These are taken from RFC2068 and RFC2109. 
+# _LegalChars is the list of chars which don't require "'s +# _Translator hash-table for fast quoting +# +_LegalChars = string.ascii_letters + string.digits + "!#$%&'*+-.^_`|~[]_" +_Translator = { + '\000' : '\\000', '\001' : '\\001', '\002' : '\\002', + '\003' : '\\003', '\004' : '\\004', '\005' : '\\005', + '\006' : '\\006', '\007' : '\\007', '\010' : '\\010', + '\011' : '\\011', '\012' : '\\012', '\013' : '\\013', + '\014' : '\\014', '\015' : '\\015', '\016' : '\\016', + '\017' : '\\017', '\020' : '\\020', '\021' : '\\021', + '\022' : '\\022', '\023' : '\\023', '\024' : '\\024', + '\025' : '\\025', '\026' : '\\026', '\027' : '\\027', + '\030' : '\\030', '\031' : '\\031', '\032' : '\\032', + '\033' : '\\033', '\034' : '\\034', '\035' : '\\035', + '\036' : '\\036', '\037' : '\\037', + + # Because of the way browsers really handle cookies (as opposed + # to what the RFC says) we also encode , and ; + + ',' : '\\054', ';' : '\\073', + + '"' : '\\"', '\\' : '\\\\', + + '\177' : '\\177', '\200' : '\\200', '\201' : '\\201', + '\202' : '\\202', '\203' : '\\203', '\204' : '\\204', + '\205' : '\\205', '\206' : '\\206', '\207' : '\\207', + '\210' : '\\210', '\211' : '\\211', '\212' : '\\212', + '\213' : '\\213', '\214' : '\\214', '\215' : '\\215', + '\216' : '\\216', '\217' : '\\217', '\220' : '\\220', + '\221' : '\\221', '\222' : '\\222', '\223' : '\\223', + '\224' : '\\224', '\225' : '\\225', '\226' : '\\226', + '\227' : '\\227', '\230' : '\\230', '\231' : '\\231', + '\232' : '\\232', '\233' : '\\233', '\234' : '\\234', + '\235' : '\\235', '\236' : '\\236', '\237' : '\\237', + '\240' : '\\240', '\241' : '\\241', '\242' : '\\242', + '\243' : '\\243', '\244' : '\\244', '\245' : '\\245', + '\246' : '\\246', '\247' : '\\247', '\250' : '\\250', + '\251' : '\\251', '\252' : '\\252', '\253' : '\\253', + '\254' : '\\254', '\255' : '\\255', '\256' : '\\256', + '\257' : '\\257', '\260' : '\\260', '\261' : '\\261', + '\262' : '\\262', '\263' : '\\263', '\264' : '\\264', + '\265' : 
'\\265', '\266' : '\\266', '\267' : '\\267', + '\270' : '\\270', '\271' : '\\271', '\272' : '\\272', + '\273' : '\\273', '\274' : '\\274', '\275' : '\\275', + '\276' : '\\276', '\277' : '\\277', '\300' : '\\300', + '\301' : '\\301', '\302' : '\\302', '\303' : '\\303', + '\304' : '\\304', '\305' : '\\305', '\306' : '\\306', + '\307' : '\\307', '\310' : '\\310', '\311' : '\\311', + '\312' : '\\312', '\313' : '\\313', '\314' : '\\314', + '\315' : '\\315', '\316' : '\\316', '\317' : '\\317', + '\320' : '\\320', '\321' : '\\321', '\322' : '\\322', + '\323' : '\\323', '\324' : '\\324', '\325' : '\\325', + '\326' : '\\326', '\327' : '\\327', '\330' : '\\330', + '\331' : '\\331', '\332' : '\\332', '\333' : '\\333', + '\334' : '\\334', '\335' : '\\335', '\336' : '\\336', + '\337' : '\\337', '\340' : '\\340', '\341' : '\\341', + '\342' : '\\342', '\343' : '\\343', '\344' : '\\344', + '\345' : '\\345', '\346' : '\\346', '\347' : '\\347', + '\350' : '\\350', '\351' : '\\351', '\352' : '\\352', + '\353' : '\\353', '\354' : '\\354', '\355' : '\\355', + '\356' : '\\356', '\357' : '\\357', '\360' : '\\360', + '\361' : '\\361', '\362' : '\\362', '\363' : '\\363', + '\364' : '\\364', '\365' : '\\365', '\366' : '\\366', + '\367' : '\\367', '\370' : '\\370', '\371' : '\\371', + '\372' : '\\372', '\373' : '\\373', '\374' : '\\374', + '\375' : '\\375', '\376' : '\\376', '\377' : '\\377' + } + +_idmap = ''.join(chr(x) for x in xrange(256)) + +def _quote(str, LegalChars=_LegalChars, + idmap=_idmap, translate=string.translate): + # + # If the string does not need to be double-quoted, + # then just return the string. Otherwise, surround + # the string in doublequotes and precede quote (with a \) + # special characters. 
+ # + if "" == translate(str, idmap, LegalChars): + return str + else: + return '"' + _nulljoin( map(_Translator.get, str, str) ) + '"' +# end _quote + + +_OctalPatt = re.compile(r"\\[0-3][0-7][0-7]") +_QuotePatt = re.compile(r"[\\].") + +def _unquote(str): + # If there aren't any doublequotes, + # then there can't be any special characters. See RFC 2109. + if len(str) < 2: + return str + if str[0] != '"' or str[-1] != '"': + return str + + # We have to assume that we must decode this string. + # Down to work. + + # Remove the "s + str = str[1:-1] + + # Check for special sequences. Examples: + # \012 --> \n + # \" --> " + # + i = 0 + n = len(str) + res = [] + while 0 <= i < n: + Omatch = _OctalPatt.search(str, i) + Qmatch = _QuotePatt.search(str, i) + if not Omatch and not Qmatch: # Neither matched + res.append(str[i:]) + break + # else: + j = k = -1 + if Omatch: j = Omatch.start(0) + if Qmatch: k = Qmatch.start(0) + if Qmatch and ( not Omatch or k < j ): # QuotePatt matched + res.append(str[i:k]) + res.append(str[k+1]) + i = k+2 + else: # OctalPatt matched + res.append(str[i:j]) + res.append( chr( int(str[j+1:j+4], 8) ) ) + i = j+4 + return _nulljoin(res) +# end _unquote + +# The _getdate() routine is used to set the expiration time in +# the cookie's HTTP header. By default, _getdate() returns the +# current time in the appropriate "expires" format for a +# Set-Cookie header. The one optional argument is an offset from +# now, in seconds. For example, an offset of -3600 means "one hour ago". +# The offset may be a floating point number. 
+# + +_weekdayname = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun'] + +_monthname = [None, + 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', + 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'] + +def _getdate(future=0, weekdayname=_weekdayname, monthname=_monthname): + from time import gmtime, time + now = time() + year, month, day, hh, mm, ss, wd, y, z = gmtime(now + future) + return "%s, %02d-%3s-%4d %02d:%02d:%02d GMT" % \ + (weekdayname[wd], day, monthname[month], year, hh, mm, ss) + + +# +# A class to hold ONE key,value pair. +# In a cookie, each such pair may have several attributes. +# so this class is used to keep the attributes associated +# with the appropriate key,value pair. +# This class also includes a coded_value attribute, which +# is used to hold the network representation of the +# value. This is most useful when Python objects are +# pickled for network transit. +# + +class Morsel(dict): + # RFC 2109 lists these attributes as reserved: + # path comment domain + # max-age secure version + # + # For historical reasons, these attributes are also reserved: + # expires + # + # This is an extension from Microsoft: + # httponly + # + # This dictionary provides a mapping from the lowercase + # variant on the left to the appropriate traditional + # formatting on the right. 
+ _reserved = { "expires" : "expires", + "path" : "Path", + "comment" : "Comment", + "domain" : "Domain", + "max-age" : "Max-Age", + "secure" : "secure", + "httponly" : "httponly", + "version" : "Version", + } + + def __init__(self): + # Set defaults + self.key = self.value = self.coded_value = None + + # Set default attributes + for K in self._reserved: + dict.__setitem__(self, K, "") + # end __init__ + + def __setitem__(self, K, V): + K = K.lower() + if not K in self._reserved: + raise CookieError("Invalid Attribute %s" % K) + dict.__setitem__(self, K, V) + # end __setitem__ + + def isReservedKey(self, K): + return K.lower() in self._reserved + # end isReservedKey + + def set(self, key, val, coded_val, + LegalChars=_LegalChars, + idmap=_idmap, translate=string.translate): + # First we verify that the key isn't a reserved word + # Second we make sure it only contains legal characters + if key.lower() in self._reserved: + raise CookieError("Attempt to set a reserved key: %s" % key) + if "" != translate(key, idmap, LegalChars): + raise CookieError("Illegal key value: %s" % key) + + # It's a good key, so save it. 
+ self.key = key + self.value = val + self.coded_value = coded_val + # end set + + def output(self, attrs=None, header = "Set-Cookie:"): + return "%s %s" % ( header, self.OutputString(attrs) ) + + __str__ = output + + def __repr__(self): + return '<%s: %s=%s>' % (self.__class__.__name__, + self.key, repr(self.value) ) + + def js_output(self, attrs=None): + # Print javascript + return """ + + """ % ( self.OutputString(attrs).replace('"',r'\"'), ) + # end js_output() + + def OutputString(self, attrs=None): + # Build up our result + # + result = [] + RA = result.append + + # First, the key=value pair + RA("%s=%s" % (self.key, self.coded_value)) + + # Now add any defined attributes + if attrs is None: + attrs = self._reserved + items = self.items() + items.sort() + for K,V in items: + if V == "": continue + if K not in attrs: continue + if K == "expires" and type(V) == type(1): + RA("%s=%s" % (self._reserved[K], _getdate(V))) + elif K == "max-age" and type(V) == type(1): + RA("%s=%d" % (self._reserved[K], V)) + elif K == "secure": + RA(str(self._reserved[K])) + elif K == "httponly": + RA(str(self._reserved[K])) + else: + RA("%s=%s" % (self._reserved[K], V)) + + # Return the result + return _semispacejoin(result) + # end OutputString +# end Morsel class + + + +# +# Pattern for finding cookie +# +# This used to be strict parsing based on the RFC2109 and RFC2068 +# specifications. I have since discovered that MSIE 3.0x doesn't +# follow the character rules outlined in those specs. As a +# result, the parsing rules here are less strict. +# + +_LegalCharsPatt = r"[\w\d!#%&'~_`><@,:/\$\*\+\-\.\^\|\)\(\?\}\{\=\[\]\_]" + +_CookiePattern = re.compile( + r"(?x)" # This is a Verbose pattern + r"(?P" # Start of group 'key' + ""+ _LegalCharsPatt +"+?" 
# Any word of at least one letter, nongreedy + r")" # End of group 'key' + r"\s*=\s*" # Equal Sign + r"(?P" # Start of group 'val' + r'"(?:[^\\"]|\\.)*"' # Any doublequoted string + r"|" # or + r"\w{3},\s[\w\d-]{9,11}\s[\d:]{8}\sGMT" # Special case for "expires" attr + r"|" # or + ""+ _LegalCharsPatt +"*" # Any word or empty string + r")" # End of group 'val' + r"\s*;?" # Probably ending in a semi-colon + ) + + +# At long last, here is the cookie class. +# Using this class is almost just like using a dictionary. +# See this module's docstring for example usage. +# +class BaseCookie(dict): + # A container class for a set of Morsels + # + + def value_decode(self, val): + """real_value, coded_value = value_decode(STRING) + Called prior to setting a cookie's value from the network + representation. The VALUE is the value read from HTTP + header. + Override this function to modify the behavior of cookies. + """ + return val, val + # end value_encode + + def value_encode(self, val): + """real_value, coded_value = value_encode(VALUE) + Called prior to setting a cookie's value from the dictionary + representation. The VALUE is the value being assigned. + Override this function to modify the behavior of cookies. 
+ """ + strval = str(val) + return strval, strval + # end value_encode + + def __init__(self, input=None): + if input: self.load(input) + # end __init__ + + def __set(self, key, real_value, coded_value): + """Private method for setting a cookie's value""" + M = self.get(key, Morsel()) + M.set(key, real_value, coded_value) + dict.__setitem__(self, key, M) + # end __set + + def __setitem__(self, key, value): + """Dictionary style assignment.""" + rval, cval = self.value_encode(value) + self.__set(key, rval, cval) + # end __setitem__ + + def output(self, attrs=None, header="Set-Cookie:", sep="\015\012"): + """Return a string suitable for HTTP.""" + result = [] + items = self.items() + items.sort() + for K,V in items: + result.append( V.output(attrs, header) ) + return sep.join(result) + # end output + + __str__ = output + + def __repr__(self): + L = [] + items = self.items() + items.sort() + for K,V in items: + L.append( '%s=%s' % (K,repr(V.value) ) ) + return '<%s: %s>' % (self.__class__.__name__, _spacejoin(L)) + + def js_output(self, attrs=None): + """Return a string suitable for JavaScript.""" + result = [] + items = self.items() + items.sort() + for K,V in items: + result.append( V.js_output(attrs) ) + return _nulljoin(result) + # end js_output + + def load(self, rawdata): + """Load cookies from a string (presumably HTTP_COOKIE) or + from a dictionary. 
Loading cookies from a dictionary 'd' + is equivalent to calling: + map(Cookie.__setitem__, d.keys(), d.values()) + """ + if type(rawdata) == type(""): + self.__ParseString(rawdata) + else: + # self.update() wouldn't call our custom __setitem__ + for k, v in rawdata.items(): + self[k] = v + return + # end load() + + def __ParseString(self, str, patt=_CookiePattern): + i = 0 # Our starting point + n = len(str) # Length of string + M = None # current morsel + + while 0 <= i < n: + # Start looking for a cookie + match = patt.search(str, i) + if not match: break # No more cookies + + K,V = match.group("key"), match.group("val") + i = match.end(0) + + # Parse the key, value in case it's metainfo + if K[0] == "$": + # We ignore attributes which pertain to the cookie + # mechanism as a whole. See RFC 2109. + # (Does anyone care?) + if M: + M[ K[1:] ] = V + elif K.lower() in Morsel._reserved: + if M: + M[ K ] = _unquote(V) + else: + rval, cval = self.value_decode(V) + self.__set(K, rval, cval) + M = self[K] + # end __ParseString +# end BaseCookie class + +class SimpleCookie(BaseCookie): + """SimpleCookie + SimpleCookie supports strings as cookie values. When setting + the value using the dictionary assignment notation, SimpleCookie + calls the builtin str() to convert the value to a string. Values + received from HTTP are kept as strings. + """ + def value_decode(self, val): + return _unquote( val ), val + def value_encode(self, val): + strval = str(val) + return strval, _quote( strval ) +# end SimpleCookie + +class SerialCookie(BaseCookie): + """SerialCookie + SerialCookie supports arbitrary objects as cookie values. All + values are serialized (using cPickle) before being sent to the + client. All incoming values are assumed to be valid Pickle + representations. IF AN INCOMING VALUE IS NOT IN A VALID PICKLE + FORMAT, THEN AN EXCEPTION WILL BE RAISED. + + Note: Large cookie values add overhead because they must be + retransmitted on every HTTP transaction. 
+ + Note: HTTP has a 2k limit on the size of a cookie. This class + does not check for this limit, so be careful!!! + """ + def __init__(self, input=None): + warnings.warn("SerialCookie class is insecure; do not use it", + DeprecationWarning) + BaseCookie.__init__(self, input) + # end __init__ + def value_decode(self, val): + # This could raise an exception! + return loads( _unquote(val) ), val + def value_encode(self, val): + return val, _quote( dumps(val) ) +# end SerialCookie + +class SmartCookie(BaseCookie): + """SmartCookie + SmartCookie supports arbitrary objects as cookie values. If the + object is a string, then it is quoted. If the object is not a + string, however, then SmartCookie will use cPickle to serialize + the object into a string representation. + + Note: Large cookie values add overhead because they must be + retransmitted on every HTTP transaction. + + Note: HTTP has a 2k limit on the size of a cookie. This class + does not check for this limit, so be careful!!! + """ + def __init__(self, input=None): + warnings.warn("Cookie/SmartCookie class is insecure; do not use it", + DeprecationWarning) + BaseCookie.__init__(self, input) + # end __init__ + def value_decode(self, val): + strval = _unquote(val) + try: + return loads(strval), val + except: + return strval, val + def value_encode(self, val): + if type(val) == type(""): + return val, _quote(val) + else: + return val, _quote( dumps(val) ) +# end SmartCookie + + +########################################################### +# Backwards Compatibility: Don't break any existing code! 
+ +# We provide Cookie() as an alias for SmartCookie() +Cookie = SmartCookie + +# +########################################################### + +def _test(): + import doctest, Cookie + return doctest.testmod(Cookie) + +if __name__ == "__main__": + _test() + + +#Local Variables: +#tab-width: 4 +#end: diff --git a/src/archivematicaCommon/lib/externals/requests/packages/oreos/structures.py b/src/archivematicaCommon/lib/externals/requests/packages/oreos/structures.py new file mode 100644 index 0000000000..063d5f96a8 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/requests/packages/oreos/structures.py @@ -0,0 +1,399 @@ +# -*- coding: utf-8 -*- + +""" +oreos.sructures +~~~~~~~~~~~~~~~ + +The plastic blue packaging. + +This is mostly directly stolen from mitsuhiko/werkzeug. +""" + +__all__ = ('MultiDict',) + +class _Missing(object): + + def __repr__(self): + return 'no value' + + def __reduce__(self): + return '_missing' + +_missing = _Missing() + + + +def iter_multi_items(mapping): + """Iterates over the items of a mapping yielding keys and values + without dropping any from more complex structures. + """ + if isinstance(mapping, MultiDict): + for item in mapping.iteritems(multi=True): + yield item + elif isinstance(mapping, dict): + for key, value in mapping.iteritems(): + if isinstance(value, (tuple, list)): + for value in value: + yield key, value + else: + yield key, value + else: + for item in mapping: + yield item + + + +class TypeConversionDict(dict): + """Works like a regular dict but the :meth:`get` method can perform + type conversions. :class:`MultiDict` and :class:`CombinedMultiDict` + are subclasses of this class and provide the same feature. + + .. versionadded:: 0.5 + """ + + def get(self, key, default=None, type=None): + """Return the default value if the requested data doesn't exist. + If `type` is provided and is a callable it should convert the value, + return it or raise a :exc:`ValueError` if that is not possible. 
In + this case the function will return the default as if the value was not + found: + + >>> d = TypeConversionDict(foo='42', bar='blub') + >>> d.get('foo', type=int) + 42 + >>> d.get('bar', -1, type=int) + -1 + + :param key: The key to be looked up. + :param default: The default value to be returned if the key can't + be looked up. If not further specified `None` is + returned. + :param type: A callable that is used to cast the value in the + :class:`MultiDict`. If a :exc:`ValueError` is raised + by this callable the default value is returned. + """ + try: + rv = self[key] + if type is not None: + rv = type(rv) + except (KeyError, ValueError): + rv = default + return rv + + +class MultiDict(TypeConversionDict): + """A :class:`MultiDict` is a dictionary subclass customized to deal with + multiple values for the same key which is for example used by the parsing + functions in the wrappers. This is necessary because some HTML form + elements pass multiple values for the same key. + + :class:`MultiDict` implements all standard dictionary methods. + Internally, it saves all values for a key as a list, but the standard dict + access methods will only return the first value for a key. If you want to + gain access to the other values, too, you have to use the `list` methods as + explained below. + + Basic Usage: + + >>> d = MultiDict([('a', 'b'), ('a', 'c')]) + >>> d + MultiDict([('a', 'b'), ('a', 'c')]) + >>> d['a'] + 'b' + >>> d.getlist('a') + ['b', 'c'] + >>> 'a' in d + True + + It behaves like a normal dict thus all dict functions will only return the + first value when multiple values for one key are found. + + From Werkzeug 0.3 onwards, the `KeyError` raised by this class is also a + subclass of the :exc:`~exceptions.BadRequest` HTTP exception and will + render a page for a ``400 BAD REQUEST`` if caught in a catch-all for HTTP + exceptions. 
+ + A :class:`MultiDict` can be constructed from an iterable of + ``(key, value)`` tuples, a dict, a :class:`MultiDict` or from Werkzeug 0.2 + onwards some keyword parameters. + + :param mapping: the initial value for the :class:`MultiDict`. Either a + regular dict, an iterable of ``(key, value)`` tuples + or `None`. + """ + + def __init__(self, mapping=None): + if isinstance(mapping, MultiDict): + dict.__init__(self, ((k, l[:]) for k, l in mapping.iterlists())) + elif isinstance(mapping, dict): + tmp = {} + for key, value in mapping.iteritems(): + if isinstance(value, (tuple, list)): + value = list(value) + else: + value = [value] + tmp[key] = value + dict.__init__(self, tmp) + else: + tmp = {} + for key, value in mapping or (): + tmp.setdefault(key, []).append(value) + dict.__init__(self, tmp) + + def __getstate__(self): + return dict(self.lists()) + + def __setstate__(self, value): + dict.clear(self) + dict.update(self, value) + + def __iter__(self): + return self.iterkeys() + + def __getitem__(self, key): + """Return the first data value for this key; + raises KeyError if not found. + + :param key: The key to be looked up. + :raise KeyError: if the key does not exist. + """ + if key in self: + return dict.__getitem__(self, key)[0] + raise KeyError(key) + + def __setitem__(self, key, value): + """Like :meth:`add` but removes an existing key first. + + :param key: the key for the value. + :param value: the value to set. + """ + dict.__setitem__(self, key, [value]) + + def add(self, key, value): + """Adds a new value for the key. + + .. versionadded:: 0.6 + + :param key: the key for the value. + :param value: the value to add. + """ + dict.setdefault(self, key, []).append(value) + + def getlist(self, key, type=None): + """Return the list of items for a given key. If that key is not in the + `MultiDict`, the return value will be an empty list. Just as `get` + `getlist` accepts a `type` parameter. All items will be converted + with the callable defined there. 
+ + :param key: The key to be looked up. + :param type: A callable that is used to cast the value in the + :class:`MultiDict`. If a :exc:`ValueError` is raised + by this callable the value will be removed from the list. + :return: a :class:`list` of all the values for the key. + """ + try: + rv = dict.__getitem__(self, key) + except KeyError: + return [] + if type is None: + return list(rv) + result = [] + for item in rv: + try: + result.append(type(item)) + except ValueError: + pass + return result + + def setlist(self, key, new_list): + """Remove the old values for a key and add new ones. Note that the list + you pass the values in will be shallow-copied before it is inserted in + the dictionary. + + >>> d = MultiDict() + >>> d.setlist('foo', ['1', '2']) + >>> d['foo'] + '1' + >>> d.getlist('foo') + ['1', '2'] + + :param key: The key for which the values are set. + :param new_list: An iterable with the new values for the key. Old values + are removed first. + """ + dict.__setitem__(self, key, list(new_list)) + + def setdefault(self, key, default=None): + """Returns the value for the key if it is in the dict, otherwise it + returns `default` and sets that value for `key`. + + :param key: The key to be looked up. + :param default: The default value to be returned if the key is not + in the dict. If not further specified it's `None`. + """ + if key not in self: + self[key] = default + else: + default = self[key] + return default + + def setlistdefault(self, key, default_list=None): + """Like `setdefault` but sets multiple values. The list returned + is not a copy, but the list that is actually used internally. This + means that you can put new values into the dict by appending items + to the list: + + >>> d = MultiDict({"foo": 1}) + >>> d.setlistdefault("foo").extend([2, 3]) + >>> d.getlist("foo") + [1, 2, 3] + + :param key: The key to be looked up. + :param default: An iterable of default values. 
It is either copied + (in case it was a list) or converted into a list + before returned. + :return: a :class:`list` + """ + if key not in self: + default_list = list(default_list or ()) + dict.__setitem__(self, key, default_list) + else: + default_list = dict.__getitem__(self, key) + return default_list + + def items(self, multi=False): + """Return a list of ``(key, value)`` pairs. + + :param multi: If set to `True` the list returned will have a + pair for each value of each key. Otherwise it + will only contain pairs for the first value of + each key. + + :return: a :class:`list` + """ + return list(self.iteritems(multi)) + + def lists(self): + """Return a list of ``(key, values)`` pairs, where values is the list of + all values associated with the key. + + :return: a :class:`list` + """ + return list(self.iterlists()) + + def values(self): + """Returns a list of the first value on every key's value list. + + :return: a :class:`list`. + """ + return [self[key] for key in self.iterkeys()] + + def listvalues(self): + """Return a list of all values associated with a key. 
Zipping + :meth:`keys` and this is the same as calling :meth:`lists`: + + >>> d = MultiDict({"foo": [1, 2, 3]}) + >>> zip(d.keys(), d.listvalues()) == d.lists() + True + + :return: a :class:`list` + """ + return list(self.iterlistvalues()) + + def iteritems(self, multi=False): + """Like :meth:`items` but returns an iterator.""" + for key, values in dict.iteritems(self): + if multi: + for value in values: + yield key, value + else: + yield key, values[0] + + def iterlists(self): + """Like :meth:`items` but returns an iterator.""" + for key, values in dict.iteritems(self): + yield key, list(values) + + def itervalues(self): + """Like :meth:`values` but returns an iterator.""" + for values in dict.itervalues(self): + yield values[0] + + def iterlistvalues(self): + """Like :meth:`listvalues` but returns an iterator.""" + return dict.itervalues(self) + + def copy(self): + """Return a shallow copy of this object.""" + return self.__class__(self) + + def to_dict(self, flat=True): + """Return the contents as regular dict. If `flat` is `True` the + returned dict will only have the first item present, if `flat` is + `False` all values will be returned as lists. + + :param flat: If set to `False` the dict returned will have lists + with all the values in it. Otherwise it will only + contain the first value for each key. + :return: a :class:`dict` + """ + if flat: + return dict(self.iteritems()) + return dict(self.lists()) + + def update(self, other_dict): + """update() extends rather than replaces existing key lists.""" + for key, value in iter_multi_items(other_dict): + MultiDict.add(self, key, value) + + def pop(self, key, default=_missing): + """Pop the first item for a list on the dict. Afterwards the + key is removed from the dict, so additional values are discarded: + + >>> d = MultiDict({"foo": [1, 2, 3]}) + >>> d.pop("foo") + 1 + >>> "foo" in d + False + + :param key: the key to pop. 
+ :param default: if provided the value to return if the key was + not in the dictionary. + """ + try: + return dict.pop(self, key)[0] + except KeyError, e: + if default is not _missing: + return default + raise KeyError(str(e)) + + def popitem(self): + """Pop an item from the dict.""" + try: + item = dict.popitem(self) + return (item[0], item[1][0]) + except KeyError, e: + raise KeyError(str(e)) + + def poplist(self, key): + """Pop the list for a key from the dict. If the key is not in the dict + an empty list is returned. + + .. versionchanged:: 0.5 + If the key does no longer exist a list is returned instead of + raising an error. + """ + return dict.pop(self, key, []) + + def popitemlist(self): + """Pop a ``(key, list)`` tuple from the dict.""" + try: + return dict.popitem(self) + except KeyError, e: + raise KeyError(str(e)) + + def __copy__(self): + return self.copy() + + def __repr__(self): + return '%s(%r)' % (self.__class__.__name__, self.items(multi=True)) diff --git a/src/archivematicaCommon/lib/externals/requests/packages/urllib3/__init__.py b/src/archivematicaCommon/lib/externals/requests/packages/urllib3/__init__.py new file mode 100644 index 0000000000..20b1fb4ea7 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/requests/packages/urllib3/__init__.py @@ -0,0 +1,48 @@ +# urllib3/__init__.py +# Copyright 2008-2011 Andrey Petrov and contributors (see CONTRIBUTORS.txt) +# +# This module is part of urllib3 and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +""" +urllib3 - Thread-safe connection pooling and re-using. 
+""" + +__author__ = 'Andrey Petrov (andrey.petrov@shazow.net)' +__license__ = 'MIT' +__version__ = '1.0.2' + + +from .connectionpool import ( + HTTPConnectionPool, + HTTPSConnectionPool, + connection_from_url, + get_host, + make_headers) + + +from .exceptions import ( + HTTPError, + MaxRetryError, + SSLError, + TimeoutError) + +from .poolmanager import PoolManager, ProxyManager, proxy_from_url +from .response import HTTPResponse +from .filepost import encode_multipart_formdata + + +# Set default logging handler to avoid "No handler found" warnings. +import logging +try: + from logging import NullHandler +except ImportError: + class NullHandler(logging.Handler): + def emit(self, record): + pass + +logging.getLogger(__name__).addHandler(NullHandler()) + +# ... Clean up. +del logging +del NullHandler diff --git a/src/archivematicaCommon/lib/externals/requests/packages/urllib3/_collections.py b/src/archivematicaCommon/lib/externals/requests/packages/urllib3/_collections.py new file mode 100644 index 0000000000..00b2cd58c8 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/requests/packages/urllib3/_collections.py @@ -0,0 +1,131 @@ +# urllib3/_collections.py +# Copyright 2008-2011 Andrey Petrov and contributors (see CONTRIBUTORS.txt) +# +# This module is part of urllib3 and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +from collections import deque + +from threading import RLock + +__all__ = ['RecentlyUsedContainer'] + + +class AccessEntry(object): + __slots__ = ('key', 'is_valid') + + def __init__(self, key, is_valid=True): + self.key = key + self.is_valid = is_valid + + +class RecentlyUsedContainer(dict): + """ + Provides a dict-like that maintains up to ``maxsize`` keys while throwing + away the least-recently-used keys beyond ``maxsize``. 
+ """ + + # If len(self.access_log) exceeds self._maxsize * CLEANUP_FACTOR, then we + # will attempt to cleanup the invalidated entries in the access_log + # datastructure during the next 'get' operation. + CLEANUP_FACTOR = 10 + + def __init__(self, maxsize=10): + self._maxsize = maxsize + + self._container = {} + + # We use a deque to to store our keys ordered by the last access. + self.access_log = deque() + self.access_log_lock = RLock() + + # We look up the access log entry by the key to invalidate it so we can + # insert a new authorative entry at the head without having to dig and + # find the old entry for removal immediately. + self.access_lookup = {} + + # Trigger a heap cleanup when we get past this size + self.access_log_limit = maxsize * self.CLEANUP_FACTOR + + def _invalidate_entry(self, key): + "If exists: Invalidate old entry and return it." + old_entry = self.access_lookup.get(key) + if old_entry: + old_entry.is_valid = False + + return old_entry + + def _push_entry(self, key): + "Push entry onto our access log, invalidate the old entry if exists." + self._invalidate_entry(key) + + new_entry = AccessEntry(key) + self.access_lookup[key] = new_entry + + self.access_log_lock.acquire() + self.access_log.appendleft(new_entry) + self.access_log_lock.release() + + def _prune_entries(self, num): + "Pop entries from our access log until we popped ``num`` valid ones." + while num > 0: + self.access_log_lock.acquire() + p = self.access_log.pop() + self.access_log_lock.release() + + if not p.is_valid: + continue # Invalidated entry, skip + + dict.pop(self, p.key, None) + self.access_lookup.pop(p.key, None) + num -= 1 + + def _prune_invalidated_entries(self): + "Rebuild our access_log without the invalidated entries." + self.access_log_lock.acquire() + self.access_log = deque(e for e in self.access_log if e.is_valid) + self.access_log_lock.release() + + def _get_ordered_access_keys(self): + "Return ordered access keys for inspection. Used for testing." 
+ self.access_log_lock.acquire() + r = [e.key for e in self.access_log if e.is_valid] + self.access_log_lock.release() + + return r + + def __getitem__(self, key): + item = dict.get(self, key) + + if not item: + raise KeyError(key) + + # Insert new entry with new high priority, also implicitly invalidates + # the old entry. + self._push_entry(key) + + if len(self.access_log) > self.access_log_limit: + # Heap is getting too big, try to clean up any tailing invalidated + # entries. + self._prune_invalidated_entries() + + return item + + def __setitem__(self, key, item): + # Add item to our container and access log + dict.__setitem__(self, key, item) + self._push_entry(key) + + # Discard invalid and excess entries + self._prune_entries(len(self) - self._maxsize) + + def __delitem__(self, key): + self._invalidate_entry(key) + self.access_lookup.pop(key, None) + dict.__delitem__(self, key) + + def get(self, key, default=None): + try: + return self[key] + except KeyError: + return default diff --git a/src/archivematicaCommon/lib/externals/requests/packages/urllib3/connectionpool.py b/src/archivematicaCommon/lib/externals/requests/packages/urllib3/connectionpool.py new file mode 100644 index 0000000000..8b10dc7095 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/requests/packages/urllib3/connectionpool.py @@ -0,0 +1,525 @@ +# urllib3/connectionpool.py +# Copyright 2008-2011 Andrey Petrov and contributors (see CONTRIBUTORS.txt) +# +# This module is part of urllib3 and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +import logging +import socket + + +from httplib import HTTPConnection, HTTPSConnection, HTTPException +from Queue import Queue, Empty, Full +from select import select +from socket import error as SocketError, timeout as SocketTimeout + + +try: + import ssl + BaseSSLError = ssl.SSLError +except ImportError: + ssl = None + BaseSSLError = None + + +from .request import RequestMethods +from .response import 
HTTPResponse +from .exceptions import ( + SSLError, + MaxRetryError, + TimeoutError, + HostChangedError, + EmptyPoolError, +) + + +log = logging.getLogger(__name__) + +_Default = object() + + +## Connection objects (extension of httplib) + +class VerifiedHTTPSConnection(HTTPSConnection): + """ + Based on httplib.HTTPSConnection but wraps the socket with + SSL certification. + """ + cert_reqs = None + ca_certs = None + + def set_cert(self, key_file=None, cert_file=None, + cert_reqs='CERT_NONE', ca_certs=None): + ssl_req_scheme = { + 'CERT_NONE': ssl.CERT_NONE, + 'CERT_OPTIONAL': ssl.CERT_OPTIONAL, + 'CERT_REQUIRED': ssl.CERT_REQUIRED + } + + self.key_file = key_file + self.cert_file = cert_file + self.cert_reqs = ssl_req_scheme.get(cert_reqs) or ssl.CERT_NONE + self.ca_certs = ca_certs + + def connect(self): + # Add certificate verification + sock = socket.create_connection((self.host, self.port), self.timeout) + + # Wrap socket using verification with the root certs in + # trusted_root_certs + self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file, + cert_reqs=self.cert_reqs, + ca_certs=self.ca_certs) + + +## Pool objects + +class ConnectionPool(object): + """ + Base class for all connection pools, such as + :class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`. + """ + pass + + +class HTTPConnectionPool(ConnectionPool, RequestMethods): + """ + Thread-safe connection pool for one host. + + :param host: + Host used for this HTTP Connection (e.g. "localhost"), passed into + :class:`httplib.HTTPConnection`. + + :param port: + Port used for this HTTP Connection (None is equivalent to 80), passed + into :class:`httplib.HTTPConnection`. + + :param strict: + Causes BadStatusLine to be raised if the status line can't be parsed + as a valid HTTP/1.0 or 1.1 status line, passed into + :class:`httplib.HTTPConnection`. + + :param timeout: + Socket timeout for each individual connection, can be a float. None + disables timeout. 
+ + :param maxsize: + Number of connections to save that can be reused. More than 1 is useful + in multithreaded situations. If ``block`` is set to false, more + connections will be created but they will not be saved once they've + been used. + + :param block: + If set to True, no more than ``maxsize`` connections will be used at + a time. When no free connections are available, the call will block + until a connection has been released. This is a useful side effect for + particular multithreaded situations where one does not want to use more + than maxsize connections per host to prevent flooding. + + :param headers: + Headers to include with all requests, unless other headers are given + explicitly. + """ + + scheme = 'http' + + def __init__(self, host, port=None, strict=False, timeout=None, maxsize=1, + block=False, headers=None): + self.host = host + self.port = port + self.strict = strict + self.timeout = timeout + self.pool = Queue(maxsize) + self.block = block + self.headers = headers or {} + + # Fill the queue up so that doing get() on it will block properly + for _ in xrange(maxsize): + self.pool.put(None) + + # These are mostly for testing and debugging purposes. + self.num_connections = 0 + self.num_requests = 0 + + def _new_conn(self): + """ + Return a fresh :class:`httplib.HTTPConnection`. + """ + self.num_connections += 1 + log.info("Starting new HTTP connection (%d): %s" % + (self.num_connections, self.host)) + return HTTPConnection(host=self.host, port=self.port) + + def _get_conn(self, timeout=None): + """ + Get a connection. Will return a pooled connection if one is available. + + If no connections are available and :prop:`.block` is ``False``, then a + fresh connection is returned. + + :param timeout: + Seconds to wait before giving up and raising + :class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and + :prop:`.block` is ``True``. 
+ """ + conn = None + try: + conn = self.pool.get(block=self.block, timeout=timeout) + + # If this is a persistent connection, check if it got disconnected + if conn and conn.sock and select([conn.sock], [], [], 0.0)[0]: + # Either data is buffered (bad), or the connection is dropped. + log.info("Resetting dropped connection: %s" % self.host) + conn.close() + + except Empty: + if self.block: + raise EmptyPoolError("Pool reached maximum size and no more " + "connections are allowed.") + pass # Oh well, we'll create a new connection then + + return conn or self._new_conn() + + def _put_conn(self, conn): + """ + Put a connection back into the pool. + + :param conn: + Connection object for the current host and port as returned by + :meth:`._new_conn` or :meth:`._get_conn`. + + If the pool is already full, the connection is discarded because we + exceeded maxsize. If connections are discarded frequently, then maxsize + should be increased. + """ + try: + self.pool.put(conn, block=False) + except Full: + # This should never happen if self.block == True + log.warning("HttpConnectionPool is full, discarding connection: %s" + % self.host) + + def _make_request(self, conn, method, url, timeout=_Default, + **httplib_request_kw): + """ + Perform a request on a given httplib connection object taken from our + pool. + """ + self.num_requests += 1 + + if timeout is _Default: + timeout = self.timeout + + conn.request(method, url, **httplib_request_kw) + conn.sock.settimeout(timeout) + httplib_response = conn.getresponse() + + log.debug("\"%s %s %s\" %s %s" % + (method, url, + conn._http_vsn_str, # pylint: disable-msg=W0212 + httplib_response.status, httplib_response.length)) + + return httplib_response + + + def is_same_host(self, url): + """ + Check if the given ``url`` is a member of the same host as this + conncetion pool. + """ + # TODO: Add optional support for socket.gethostbyname checking. 
+ return (url.startswith('/') or + get_host(url) == (self.scheme, self.host, self.port)) + + def urlopen(self, method, url, body=None, headers=None, retries=3, + redirect=True, assert_same_host=True, timeout=_Default, + pool_timeout=None, release_conn=None, **response_kw): + """ + Get a connection from the pool and perform an HTTP request. This is the + lowest level call for making a request, so you'll need to specify all + the raw details. + + .. note:: + + More commonly, it's appropriate to use a convenience method provided + by :class:`.RequestMethods`, such as :meth:`.request`. + + :param method: + HTTP request method (such as GET, POST, PUT, etc.) + + :param body: + Data to send in the request body (useful for creating + POST requests, see HTTPConnectionPool.post_url for + more convenience). + + :param headers: + Dictionary of custom headers to send, such as User-Agent, + If-None-Match, etc. If None, pool headers are used. If provided, + these headers completely replace any pool-specific headers. + + :param retries: + Number of retries to allow before raising a MaxRetryError exception. + + :param redirect: + Automatically handle redirects (status codes 301, 302, 303, 307), + each redirect counts as a retry. + + :param assert_same_host: + If ``True``, will make sure that the host of the pool requests is + consistent else will raise HostChangedError. When False, you can + use the pool on an HTTP proxy and request foreign hosts. + + :param timeout: + If specified, overrides the default timeout for this one request. + + :param pool_timeout: + If set and the pool is set to block=True, then this method will + block for ``pool_timeout`` seconds and raise EmptyPoolError if no + connection is available within the time period. + + :param release_conn: + If False, then the urlopen call will not release the connection + back into the pool once a response is received. This is useful if + you're not preloading the response's content immediately. 
You will + need to call ``r.release_conn()`` on the response ``r`` to return + the connection back into the pool. If None, it takes the value of + ``response_kw.get('preload_content', True)``. + + :param \**response_kw: + Additional parameters are passed to + :meth:`urllib3.response.HTTPResponse.from_httplib` + """ + if headers is None: + headers = self.headers + + if retries < 0: + raise MaxRetryError("Max retries exceeded for url: %s" % url) + + if release_conn is None: + release_conn = response_kw.get('preload_content', True) + + # Check host + if assert_same_host and not self.is_same_host(url): + host = "%s://%s" % (self.scheme, self.host) + if self.port: + host = "%s:%d" % (host, self.port) + + raise HostChangedError("Connection pool with host '%s' tried to " + "open a foreign host: %s" % (host, url)) + + conn = None + + try: + # Request a connection from the queue + # (Could raise SocketError: Bad file descriptor) + conn = self._get_conn(timeout=pool_timeout) + + # Make the request on the httplib connection object + httplib_response = self._make_request(conn, method, url, + timeout=timeout, + body=body, headers=headers) + + # If we're going to release the connection in ``finally:``, then + # the request doesn't need to know about the connection. Otherwise + # it will also try to release it and we'll have a double-release + # mess. 
+ response_conn = not release_conn and conn + + # Import httplib's response into our own wrapper object + response = HTTPResponse.from_httplib(httplib_response, + pool=self, + connection=response_conn, + **response_kw) + + # else: + # The connection will be put back into the pool when + # ``response.release_conn()`` is called (implicitly by + # ``response.read()``) + + except (SocketTimeout, Empty), e: + # Timed out either by socket or queue + raise TimeoutError("Request timed out after %s seconds" % + self.timeout) + + except (BaseSSLError), e: + # SSL certificate error + raise SSLError(e) + + except (HTTPException, SocketError), e: + # Connection broken, discard. It will be replaced next _get_conn(). + conn = None + + finally: + if conn and release_conn: + # Put the connection back to be reused + self._put_conn(conn) + + if not conn: + log.warn("Retrying (%d attempts remain) after connection " + "broken by '%r': %s" % (retries, e, url)) + return self.urlopen(method, url, body, headers, retries - 1, + redirect, assert_same_host) # Try again + + # Handle redirection + if (redirect and + response.status in [301, 302, 303, 307] and + 'location' in response.headers): # Redirect, retry + log.info("Redirecting %s -> %s" % + (url, response.headers.get('location'))) + return self.urlopen(method, response.headers.get('location'), body, + headers, retries - 1, redirect, + assert_same_host) + + return response + + +class HTTPSConnectionPool(HTTPConnectionPool): + """ + Same as :class:`.HTTPConnectionPool`, but HTTPS. + + When Python is compiled with the :mod:`ssl` module, then + :class:`.VerifiedHTTPSConnection` is used, which *can* verify certificates, + instead of :class:httplib.HTTPSConnection`. + + The ``key_file``, ``cert_file``, ``cert_reqs``, and ``ca_certs`` parameters + are only used if :mod:`ssl` is available and are fed into + :meth:`ssl.wrap_socket` to upgrade the connection socket into an SSL socket. 
+ """ + + scheme = 'https' + + def __init__(self, host, port=None, + strict=False, timeout=None, maxsize=1, + block=False, headers=None, + key_file=None, cert_file=None, + cert_reqs='CERT_NONE', ca_certs=None): + + super(HTTPSConnectionPool, self).__init__(host, port, + strict, timeout, maxsize, + block, headers) + self.key_file = key_file + self.cert_file = cert_file + self.cert_reqs = cert_reqs + self.ca_certs = ca_certs + + def _new_conn(self): + """ + Return a fresh :class:`httplib.HTTPSConnection`. + """ + self.num_connections += 1 + log.info("Starting new HTTPS connection (%d): %s" + % (self.num_connections, self.host)) + + if not ssl: + return HTTPSConnection(host=self.host, port=self.port) + + connection = VerifiedHTTPSConnection(host=self.host, port=self.port) + connection.set_cert(key_file=self.key_file, cert_file=self.cert_file, + cert_reqs=self.cert_reqs, ca_certs=self.ca_certs) + return connection + + +## Helpers + +def make_headers(keep_alive=None, accept_encoding=None, user_agent=None, + basic_auth=None): + """ + Shortcuts for generating request headers. + + :param keep_alive: + If ``True``, adds 'connection: keep-alive' header. + + :param accept_encoding: + Can be a boolean, list, or string. + ``True`` translates to 'gzip,deflate'. + List will get joined by comma. + String will be used as provided. + + :param user_agent: + String representing the user-agent you want, such as + "python-urllib3/0.6" + + :param basic_auth: + Colon-separated username:password string for 'authorization: basic ...' + auth header. 
+ + Example: :: + + >>> make_headers(keep_alive=True, user_agent="Batman/1.0") + {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'} + >>> make_headers(accept_encoding=True) + {'accept-encoding': 'gzip,deflate'} + """ + headers = {} + if accept_encoding: + if isinstance(accept_encoding, str): + pass + elif isinstance(accept_encoding, list): + accept_encoding = ','.join(accept_encoding) + else: + accept_encoding = 'gzip,deflate' + headers['accept-encoding'] = accept_encoding + + if user_agent: + headers['user-agent'] = user_agent + + if keep_alive: + headers['connection'] = 'keep-alive' + + if basic_auth: + headers['authorization'] = 'Basic ' + \ + basic_auth.encode('base64').strip() + + return headers + + +def get_host(url): + """ + Given a url, return its scheme, host and port (None if it's not there). + + For example: :: + + >>> get_host('http://google.com/mail/') + ('http', 'google.com', None) + >>> get_host('google.com:80') + ('http', 'google.com', 80) + """ + # This code is actually similar to urlparse.urlsplit, but much + # simplified for our needs. + port = None + scheme = 'http' + if '//' in url: + scheme, url = url.split('://', 1) + if '/' in url: + url, _path = url.split('/', 1) + if ':' in url: + url, port = url.split(':', 1) + port = int(port) + return scheme, url, port + + +def connection_from_url(url, **kw): + """ + Given a url, return an :class:`.ConnectionPool` instance of its host. + + This is a shortcut for not having to parse out the scheme, host, and port + of the url before creating an :class:`.ConnectionPool` instance. + + :param url: + Absolute URL string that must include the scheme. Port is optional. + + :param \**kw: + Passes additional parameters to the constructor of the appropriate + :class:`.ConnectionPool`. Useful for specifying things like + timeout, maxsize, headers, etc. 
+ + Example: :: + + >>> conn = connection_from_url('http://google.com/') + >>> r = conn.request('GET', '/') + """ + scheme, host, port = get_host(url) + if scheme == 'https': + return HTTPSConnectionPool(host, port=port, **kw) + else: + return HTTPConnectionPool(host, port=port, **kw) diff --git a/src/archivematicaCommon/lib/externals/requests/packages/urllib3/contrib/__init__.py b/src/archivematicaCommon/lib/externals/requests/packages/urllib3/contrib/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/archivematicaCommon/lib/externals/requests/packages/urllib3/contrib/ntlmpool.py b/src/archivematicaCommon/lib/externals/requests/packages/urllib3/contrib/ntlmpool.py new file mode 100644 index 0000000000..c5f010e190 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/requests/packages/urllib3/contrib/ntlmpool.py @@ -0,0 +1,117 @@ +# urllib3/contrib/ntlmpool.py +# Copyright 2008-2011 Andrey Petrov and contributors (see CONTRIBUTORS.txt) +# +# This module is part of urllib3 and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +""" +NTLM authenticating pool, contributed by erikcederstran + +Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10 +""" + +import httplib +from logging import getLogger +from ntlm import ntlm + +from urllib3 import HTTPSConnectionPool + + +log = getLogger(__name__) + + +class NTLMConnectionPool(HTTPSConnectionPool): + """ + Implements an NTLM authentication version of an urllib3 connection pool + """ + + scheme = 'https' + + def __init__(self, user, pw, authurl, *args, **kwargs): + """ + authurl is a random URL on the server that is protected by NTLM. + user is the Windows user, probably in the DOMAIN\username format. + pw is the password for the user. 
+ """ + super(NTLMConnectionPool, self).__init__(*args, **kwargs) + self.authurl = authurl + self.rawuser = user + user_parts = user.split('\\', 1) + self.domain = user_parts[0].upper() + self.user = user_parts[1] + self.pw = pw + + def _new_conn(self): + # Performs the NTLM handshake that secures the connection. The socket + # must be kept open while requests are performed. + self.num_connections += 1 + log.debug('Starting NTLM HTTPS connection no. %d: https://%s%s' % + (self.num_connections, self.host, self.authurl)) + + headers = {} + headers['Connection'] = 'Keep-Alive' + req_header = 'Authorization' + resp_header = 'www-authenticate' + + conn = httplib.HTTPSConnection(host=self.host, port=self.port) + + # Send negotiation message + headers[req_header] = ( + 'NTLM %s' % ntlm.create_NTLM_NEGOTIATE_MESSAGE(self.rawuser)) + log.debug('Request headers: %s' % headers) + conn.request('GET', self.authurl, None, headers) + res = conn.getresponse() + reshdr = dict(res.getheaders()) + log.debug('Response status: %s %s' % (res.status, res.reason)) + log.debug('Response headers: %s' % reshdr) + log.debug('Response data: %s [...]' % res.read(100)) + + # Remove the reference to the socket, so that it can not be closed by + # the response object (we want to keep the socket open) + res.fp = None + + # Server should respond with a challenge message + auth_header_values = reshdr[resp_header].split(', ') + auth_header_value = None + for s in auth_header_values: + if s[:5] == 'NTLM ': + auth_header_value = s[5:] + if auth_header_value is None: + raise Exception('Unexpected %s response header: %s' % + (resp_header, reshdr[resp_header])) + + # Send authentication message + ServerChallenge, NegotiateFlags = \ + ntlm.parse_NTLM_CHALLENGE_MESSAGE(auth_header_value) + auth_msg = ntlm.create_NTLM_AUTHENTICATE_MESSAGE(ServerChallenge, + self.user, + self.domain, + self.pw, + NegotiateFlags) + headers[req_header] = 'NTLM %s' % auth_msg + log.debug('Request headers: %s' % headers) + 
conn.request('GET', self.authurl, None, headers) + res = conn.getresponse() + log.debug('Response status: %s %s' % (res.status, res.reason)) + log.debug('Response headers: %s' % dict(res.getheaders())) + log.debug('Response data: %s [...]' % res.read()[:100]) + if res.status != 200: + if res.status == 401: + raise Exception('Server rejected request: wrong ' + 'username or password') + raise Exception('Wrong server response: %s %s' % + (res.status, res.reason)) + + res.fp = None + log.debug('Connection established') + return conn + + def urlopen(self, method, url, body=None, headers=None, retries=3, + redirect=True, assert_same_host=True): + if headers is None: + headers = {} + headers['Connection'] = 'Keep-Alive' + return super(NTLMConnectionPool, self).urlopen(method, url, body, + headers, retries, + redirect, + assert_same_host) diff --git a/src/archivematicaCommon/lib/externals/requests/packages/urllib3/exceptions.py b/src/archivematicaCommon/lib/externals/requests/packages/urllib3/exceptions.py new file mode 100644 index 0000000000..69f459bdc9 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/requests/packages/urllib3/exceptions.py @@ -0,0 +1,35 @@ +# urllib3/exceptions.py +# Copyright 2008-2011 Andrey Petrov and contributors (see CONTRIBUTORS.txt) +# +# This module is part of urllib3 and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +## Exceptions + +class HTTPError(Exception): + "Base exception used by this module." + pass + + +class SSLError(Exception): + "Raised when SSL certificate fails in an HTTPS connection." + pass + + +class MaxRetryError(HTTPError): + "Raised when the maximum number of retries is exceeded." + pass + + +class TimeoutError(HTTPError): + "Raised when a socket timeout occurs." + pass + + +class HostChangedError(HTTPError): + "Raised when an existing pool gets a request for a foreign host." 
+ pass + +class EmptyPoolError(HTTPError): + "Raised when a pool runs out of connections and no more are allowed." + pass diff --git a/src/archivematicaCommon/lib/externals/requests/packages/urllib3/filepost.py b/src/archivematicaCommon/lib/externals/requests/packages/urllib3/filepost.py new file mode 100644 index 0000000000..2ffea8bbf5 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/requests/packages/urllib3/filepost.py @@ -0,0 +1,71 @@ +# urllib3/filepost.py +# Copyright 2008-2011 Andrey Petrov and contributors (see CONTRIBUTORS.txt) +# +# This module is part of urllib3 and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +import codecs +import mimetools +import mimetypes + +try: + from cStringIO import StringIO +except ImportError: + from StringIO import StringIO # pylint: disable-msg=W0404 + + +writer = codecs.lookup('utf-8')[3] + + +def get_content_type(filename): + return mimetypes.guess_type(filename)[0] or 'application/octet-stream' + + +def encode_multipart_formdata(fields, boundary=None): + """ + Encode a dictionary of ``fields`` using the multipart/form-data mime format. + + :param fields: + Dictionary of fields. The key is treated as the field name, and the + value as the body of the form-data. If the value is a tuple of two + elements, then the first element is treated as the filename of the + form-data section. + + :param boundary: + If not specified, then a random boundary will be generated using + :func:`mimetools.choose_boundary`. 
+ """ + body = StringIO() + if boundary is None: + boundary = mimetools.choose_boundary() + + for fieldname, value in fields.iteritems(): + body.write('--%s\r\n' % (boundary)) + + if isinstance(value, tuple): + filename, data = value + writer(body).write('Content-Disposition: form-data; name="%s"; ' + 'filename="%s"\r\n' % (fieldname, filename)) + body.write('Content-Type: %s\r\n\r\n' % + (get_content_type(filename))) + else: + data = value + writer(body).write('Content-Disposition: form-data; name="%s"\r\n' + % (fieldname)) + body.write('Content-Type: text/plain\r\n\r\n') + + if isinstance(data, int): + data = str(data) # Backwards compatibility + + if isinstance(data, unicode): + writer(body).write(data) + else: + body.write(data) + + body.write('\r\n') + + body.write('--%s--\r\n' % (boundary)) + + content_type = 'multipart/form-data; boundary=%s' % boundary + + return body.getvalue(), content_type diff --git a/src/archivematicaCommon/lib/externals/requests/packages/urllib3/poolmanager.py b/src/archivematicaCommon/lib/externals/requests/packages/urllib3/poolmanager.py new file mode 100644 index 0000000000..c08e327f81 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/requests/packages/urllib3/poolmanager.py @@ -0,0 +1,128 @@ +# urllib3/poolmanager.py +# Copyright 2008-2011 Andrey Petrov and contributors (see CONTRIBUTORS.txt) +# +# This module is part of urllib3 and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +from ._collections import RecentlyUsedContainer +from .connectionpool import ( + HTTPConnectionPool, HTTPSConnectionPool, + get_host, connection_from_url, +) + + +__all__ = ['PoolManager', 'ProxyManager', 'proxy_from_url'] + + +from .request import RequestMethods +from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool + + +pool_classes_by_scheme = { + 'http': HTTPConnectionPool, + 'https': HTTPSConnectionPool, +} + +port_by_scheme = { + 'http': 80, + 'https': 443, +} + + +class 
PoolManager(RequestMethods): + """ + Allows for arbitrary requests while transparently keeping track of + necessary connection pools for you. + + :param num_pools: + Number of connection pools to cache before discarding the least recently + used pool. + + :param \**connection_pool_kw: + Additional parameters are used to create fresh + :class:`urllib3.connectionpool.ConnectionPool` instances. + + Example: :: + + >>> manager = PoolManager() + >>> r = manager.urlopen("http://google.com/") + >>> r = manager.urlopen("http://google.com/mail") + >>> r = manager.urlopen("http://yahoo.com/") + >>> len(r.pools) + 2 + + """ + + # TODO: Make sure there are no memory leaks here. + + def __init__(self, num_pools=10, **connection_pool_kw): + self.connection_pool_kw = connection_pool_kw + self.pools = RecentlyUsedContainer(num_pools) + + def connection_from_host(self, host, port=80, scheme='http'): + """ + Get a :class:`ConnectionPool` based on the host, port, and scheme. + + Note that an appropriate ``port`` value is required here to normalize + connection pools in our container most effectively. + """ + pool_key = (scheme, host, port) + + # If the scheme, host, or port doesn't match existing open connections, + # open a new ConnectionPool. + pool = self.pools.get(pool_key) + if pool: + return pool + + # Make a fresh ConnectionPool of the desired type + pool_cls = pool_classes_by_scheme[scheme] + pool = pool_cls(host, port, **self.connection_pool_kw) + + self.pools[pool_key] = pool + + return pool + + def connection_from_url(self, url): + """ + Similar to :func:`urllib3.connectionpool.connection_from_url` but + doesn't pass any additional parameters to the + :class:`urllib3.connectionpool.ConnectionPool` constructor. + + Additional parameters are taken from the :class:`.PoolManager` + constructor. 
+ """ + scheme, host, port = get_host(url) + + port = port or port_by_scheme.get(scheme, 80) + + return self.connection_from_host(host, port=port, scheme=scheme) + + def urlopen(self, method, url, **kw): + """ + Same as :meth:`urllib3.connectionpool.HTTPConnectionPool.urlopen`. + + ``url`` must be absolute, such that an appropriate + :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it. + """ + conn = self.connection_from_url(url) + return conn.urlopen(method, url, assert_same_host=False, **kw) + + +class ProxyManager(RequestMethods): + """ + Given a ConnectionPool to a proxy, the ProxyManager's ``urlopen`` method + will make requests to any url through the defined proxy. + """ + + def __init__(self, proxy_pool): + self.proxy_pool = proxy_pool + + def urlopen(self, method, url, **kw): + "Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute." + kw['assert_same_host'] = False + return self.proxy_pool.urlopen(method, url, **kw) + + +def proxy_from_url(url, **pool_kw): + proxy_pool = connection_from_url(url, **pool_kw) + return ProxyManager(proxy_pool) diff --git a/src/archivematicaCommon/lib/externals/requests/packages/urllib3/request.py b/src/archivematicaCommon/lib/externals/requests/packages/urllib3/request.py new file mode 100644 index 0000000000..a7e0b5de26 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/requests/packages/urllib3/request.py @@ -0,0 +1,145 @@ +# urllib3/request.py +# Copyright 2008-2011 Andrey Petrov and contributors (see CONTRIBUTORS.txt) +# +# This module is part of urllib3 and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + + +from urllib import urlencode + +from .filepost import encode_multipart_formdata + + +__all__ = ['RequestMethods'] + + +class RequestMethods(object): + """ + Convenience mixin for classes who implement a :meth:`urlopen` method, such + as :class:`~urllib3.connectionpool.HTTPConnectionPool` and + :class:`~urllib3.poolmanager.PoolManager`. 
+ + Provides behavior for making common types of HTTP request methods and + decides which type of request field encoding to use. + + Specifically, + + :meth:`.request_encode_url` is for sending requests whose fields are encoded + in the URL (such as GET, HEAD, DELETE). + + :meth:`.request_encode_body` is for sending requests whose fields are + encoded in the *body* of the request using multipart or www-orm-urlencoded + (such as for POST, PUT, PATCH). + + :meth:`.request` is for making any kind of request, it will look up the + appropriate encoding format and use one of the above two methods to make + the request. + """ + + _encode_url_methods = set(['DELETE', 'GET', 'HEAD', 'OPTIONS']) + + _encode_body_methods = set(['PATCH', 'POST', 'PUT', 'TRACE']) + + def urlopen(self, method, url, body=None, headers=None, + encode_multipart=True, multipart_boundary=None, + **kw): + raise NotImplemented("Classes extending RequestMethods must implement " + "their own ``urlopen`` method.") + + def request(self, method, url, fields=None, headers=None, **urlopen_kw): + """ + Make a request using :meth:`urlopen` with the appropriate encoding of + ``fields`` based on the ``method`` used. + + This is a convenience method that requires the least amount of manual + effort. It can be used in most situations, while still having the option + to drop down to more specific methods when necessary, such as + :meth:`request_encode_url`, :meth:`request_encode_body`, + or even the lowest level :meth:`urlopen`. + """ + method = method.upper() + + if method in self._encode_url_methods: + return self.request_encode_url(method, url, fields=fields, + headers=headers, + **urlopen_kw) + else: + return self.request_encode_body(method, url, fields=fields, + headers=headers, + **urlopen_kw) + + def request_encode_url(self, method, url, fields=None, **urlopen_kw): + """ + Make a request using :meth:`urlopen` with the ``fields`` encoded in + the url. 
This is useful for request methods like GET, HEAD, DELETE, etc. + """ + if fields: + url += '?' + urlencode(fields) + return self.urlopen(method, url, **urlopen_kw) + + def request_encode_body(self, method, url, fields=None, headers=None, + encode_multipart=True, multipart_boundary=None, + **urlopen_kw): + """ + Make a request using :meth:`urlopen` with the ``fields`` encoded in + the body. This is useful for request methods like POST, PUT, PATCH, etc. + + When ``encode_multipart=True`` (default), then + :meth:`urllib3.filepost.encode_multipart_formdata` is used to encode the + payload with the appropriate content type. Otherwise + :meth:`urllib.urlencode` is used with the + 'application/x-www-form-urlencoded' content type. + + Multipart encoding must be used when posting files, and it's reasonably + safe to use it in other times too. However, it may break request signing, + such as with OAuth. + + Supports an optional ``fields`` parameter of key/value strings AND + key/filetuple. A filetuple is a (filename, data) tuple. For example: :: + + fields = { + 'foo': 'bar', + 'fakefile': ('foofile.txt', 'contents of foofile'), + 'realfile': ('barfile.txt', open('realfile').read()), + 'nonamefile': ('contents of nonamefile field'), + } + + When uploading a file, providing a filename (the first parameter of the + tuple) is optional but recommended to best mimick behavior of browsers. + + Note that if ``headers`` are supplied, the 'Content-Type' header will be + overwritten because it depends on the dynamic random boundary string + which is used to compose the body of the request. The random boundary + string can be explicitly set with the ``multipart_boundary`` parameter. 
+ """ + if encode_multipart: + body, content_type = encode_multipart_formdata(fields or {}, + boundary=multipart_boundary) + else: + body, content_type = (urlencode(fields or {}), + 'application/x-www-form-urlencoded') + + headers = headers or {} + headers.update({'Content-Type': content_type}) + + return self.urlopen(method, url, body=body, headers=headers, + **urlopen_kw) + + # Deprecated: + + def get_url(self, url, fields=None, **urlopen_kw): + """ + .. deprecated:: 1.0 + Use :meth:`request` instead. + """ + return self.request_encode_url('GET', url, fields=fields, + **urlopen_kw) + + def post_url(self, url, fields=None, headers=None, **urlopen_kw): + """ + .. deprecated:: 1.0 + Use :meth:`request` instead. + """ + return self.request_encode_body('POST', url, fields=fields, + headers=headers, + **urlopen_kw) diff --git a/src/archivematicaCommon/lib/externals/requests/packages/urllib3/response.py b/src/archivematicaCommon/lib/externals/requests/packages/urllib3/response.py new file mode 100644 index 0000000000..4cd15c11b3 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/requests/packages/urllib3/response.py @@ -0,0 +1,181 @@ +# urllib3/response.py +# Copyright 2008-2011 Andrey Petrov and contributors (see CONTRIBUTORS.txt) +# +# This module is part of urllib3 and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +import gzip +import logging +import zlib + + +try: + from cStringIO import StringIO +except ImportError: + from StringIO import StringIO # pylint: disable-msg=W0404 + + +from .exceptions import HTTPError + + +log = logging.getLogger(__name__) + + +def decode_gzip(data): + gzipper = gzip.GzipFile(fileobj=StringIO(data)) + return gzipper.read() + + +def decode_deflate(data): + try: + return zlib.decompress(data) + except zlib.error: + return zlib.decompress(data, -zlib.MAX_WBITS) + + +class HTTPResponse(object): + """ + HTTP Response container. 
+ + Backwards-compatible to httplib's HTTPResponse but the response ``body`` is + loaded and decoded on-demand when the ``data`` property is accessed. + + Extra parameters for behaviour not present in httplib.HTTPResponse: + + :param preload_content: + If True, the response's body will be preloaded during construction. + + :param decode_content: + If True, attempts to decode specific content-encoding's based on headers + (like 'gzip' and 'deflate') will be skipped and raw data will be used + instead. + + :param original_response: + When this HTTPResponse wrapper is generated from an httplib.HTTPResponse + object, it's convenient to include the original for debug purposes. It's + otherwise unused. + """ + + CONTENT_DECODERS = { + 'gzip': decode_gzip, + 'deflate': decode_deflate, + } + + def __init__(self, body='', headers=None, status=0, version=0, reason=None, + strict=0, preload_content=True, decode_content=True, + original_response=None, pool=None, connection=None): + self.headers = headers or {} + self.status = status + self.version = version + self.reason = reason + self.strict = strict + + self._decode_content = decode_content + self._body = None + self._fp = None + self._original_response = original_response + + self._pool = pool + self._connection = connection + + if hasattr(body, 'read'): + self._fp = body + + if preload_content: + self._body = self.read(decode_content=decode_content) + + def release_conn(self): + if not self._pool or not self._connection: + return + + self._pool._put_conn(self._connection) + self._connection = None + + @property + def data(self): + # For backwords-compat with earlier urllib3 0.4 and earlier. + if self._body: + return self._body + + if self._fp: + return self.read(decode_content=self._decode_content, + cache_content=True) + + def read(self, amt=None, decode_content=True, cache_content=False): + """ + Similar to :meth:`httplib.HTTPResponse.read`, but with two additional + parameters: ``decode_content`` and ``cache_content``. 
+ + :param amt: + How much of the content to read. If specified, decoding and caching + is skipped because we can't decode partial content nor does it make + sense to cache partial content as the full response. + + :param decode_content: + If True, will attempt to decode the body based on the + 'content-encoding' header. (Overridden if ``amt`` is set.) + + :param cache_content: + If True, will save the returned data such that the same result is + returned despite of the state of the underlying file object. This + is useful if you want the ``.data`` property to continue working + after having ``.read()`` the file object. (Overridden if ``amt`` is + set.) + """ + content_encoding = self.headers.get('content-encoding') + decoder = self.CONTENT_DECODERS.get(content_encoding) + + data = self._fp and self._fp.read(amt) + + try: + + if amt: + return data + + if not decode_content or not decoder: + if cache_content: + self._body = data + + return data + + try: + data = decoder(data) + except IOError: + raise HTTPError("Received response with content-encoding: %s, but " + "failed to decode it." % content_encoding) + + if cache_content: + self._body = data + + return data + + finally: + + if self._original_response and self._original_response.isclosed(): + self.release_conn() + + @staticmethod + def from_httplib(r, **response_kw): + """ + Given an :class:`httplib.HTTPResponse` instance ``r``, return a + corresponding :class:`urllib3.response.HTTPResponse` object. + + Remaining parameters are passed to the HTTPResponse constructor, along + with ``original_response=r``. 
+ """ + + return HTTPResponse(body=r, + headers=dict(r.getheaders()), + status=r.status, + version=r.version, + reason=r.reason, + strict=r.strict, + original_response=r, + **response_kw) + + # Backwards-compatibility methods for httplib.HTTPResponse + def getheaders(self): + return self.headers + + def getheader(self, name, default=None): + return self.headers.get(name, default) diff --git a/src/archivematicaCommon/lib/externals/requests/sessions.py b/src/archivematicaCommon/lib/externals/requests/sessions.py new file mode 100644 index 0000000000..247aa1830f --- /dev/null +++ b/src/archivematicaCommon/lib/externals/requests/sessions.py @@ -0,0 +1,279 @@ +# -*- coding: utf-8 -*- + +""" +requests.session +~~~~~~~~~~~~~~~~ + +This module provides a Session object to manage and persist settings across +requests (cookies, auth, proxies). + +""" + +from .defaults import defaults +from .models import Request +from .hooks import dispatch_hook +from .utils import header_expand +from .packages.urllib3.poolmanager import PoolManager + + +def merge_kwargs(local_kwarg, default_kwarg): + """Merges kwarg dictionaries. + + If a local key in the dictionary is set to None, it will be removed. + """ + + if default_kwarg is None: + return local_kwarg + + if isinstance(local_kwarg, basestring): + return local_kwarg + + if local_kwarg is None: + return default_kwarg + + # Bypass if not a dictionary (e.g. timeout) + if not hasattr(default_kwarg, 'items'): + return local_kwarg + + # Update new values. + kwargs = default_kwarg.copy() + kwargs.update(local_kwarg) + + # Remove keys that are set to None. 
+ for (k,v) in local_kwarg.items(): + if v is None: + del kwargs[k] + + return kwargs + + +class Session(object): + """A Requests session.""" + + __attrs__ = [ + 'headers', 'cookies', 'auth', 'timeout', 'proxies', 'hooks', + 'params', 'config'] + + + def __init__(self, + headers=None, + cookies=None, + auth=None, + timeout=None, + proxies=None, + hooks=None, + params=None, + config=None): + + self.headers = headers or {} + self.cookies = cookies or {} + self.auth = auth + self.timeout = timeout + self.proxies = proxies or {} + self.hooks = hooks or {} + self.params = params or {} + self.config = config or {} + + for (k, v) in defaults.items(): + self.config.setdefault(k, v) + + self.poolmanager = PoolManager( + num_pools=self.config.get('pool_connections'), + maxsize=self.config.get('pool_maxsize') + ) + + # Set up a CookieJar to be used by default + self.cookies = {} + + # Add passed cookies in. + if cookies is not None: + self.cookies.update(cookies) + + def __repr__(self): + return '' % (id(self)) + + def __enter__(self): + return self + + def __exit__(self, *args): + pass + + def request(self, method, url, + params=None, + data=None, + headers=None, + cookies=None, + files=None, + auth=None, + timeout=None, + allow_redirects=False, + proxies=None, + hooks=None, + return_response=True, + config=None, + prefetch=False): + + """Constructs and sends a :class:`Request `. + Returns :class:`Response ` object. + + :param method: method for the new :class:`Request` object. + :param url: URL for the new :class:`Request` object. + :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`. + :param data: (optional) Dictionary or bytes to send in the body of the :class:`Request`. + :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`. + :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`. 
+ :param files: (optional) Dictionary of 'filename': file-like-objects for multipart encoding upload. + :param auth: (optional) Auth typle to enable Basic/Digest/Custom HTTP Auth. + :param timeout: (optional) Float describing the timeout of the request. + :param allow_redirects: (optional) Boolean. Set to True if POST/PUT/DELETE redirect following is allowed. + :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy. + :param return_response: (optional) If False, an un-sent Request object will returned. + :param config: (optional) A configuration dictionary. + :param prefetch: (optional) if ``True``, the response content will be immediately downloaded. + """ + + method = str(method).upper() + + # Default empty dicts for dict params. + cookies = {} if cookies is None else cookies + data = {} if data is None else data + files = {} if files is None else files + headers = {} if headers is None else headers + params = {} if params is None else params + hooks = {} if hooks is None else hooks + # use session's hooks as defaults + for key, cb in self.hooks.iteritems(): + hooks.setdefault(key, cb) + + # Expand header values. + if headers: + for k, v in headers.items() or {}: + headers[k] = header_expand(v) + + args = dict( + method=method, + url=url, + data=data, + params=params, + headers=headers, + cookies=cookies, + files=files, + auth=auth, + hooks=hooks, + timeout=timeout, + allow_redirects=allow_redirects, + proxies=proxies, + config=config, + _poolmanager=self.poolmanager + ) + + # Merge local kwargs with session kwargs. + for attr in self.__attrs__: + session_val = getattr(self, attr, None) + local_val = args.get(attr) + + args[attr] = merge_kwargs(local_val, session_val) + + # Arguments manipulation hook. + args = dispatch_hook('args', args['hooks'], args) + + # Create the (empty) response. + r = Request(**args) + + # Give the response some context. + r.session = self + + # Don't send if asked nicely. 
+ if not return_response: + return r + + # Send the HTTP Request. + r.send(prefetch=prefetch) + + # Send any cookies back up the to the session. + self.cookies.update(r.response.cookies) + + # Return the response. + return r.response + + + def get(self, url, **kwargs): + """Sends a GET request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param **kwargs: Optional arguments that ``request`` takes. + """ + + kwargs.setdefault('allow_redirects', True) + return self.request('get', url, **kwargs) + + + def options(self, url, **kwargs): + """Sends a OPTIONS request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param **kwargs: Optional arguments that ``request`` takes. + """ + + kwargs.setdefault('allow_redirects', True) + return self.request('options', url, **kwargs) + + + def head(self, url, **kwargs): + """Sends a HEAD request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param **kwargs: Optional arguments that ``request`` takes. + """ + + kwargs.setdefault('allow_redirects', True) + return self.request('head', url, **kwargs) + + + def post(self, url, data=None, **kwargs): + """Sends a POST request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary or bytes to send in the body of the :class:`Request`. + :param **kwargs: Optional arguments that ``request`` takes. + """ + + return self.request('post', url, data=data, **kwargs) + + + def put(self, url, data=None, **kwargs): + """Sends a PUT request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary or bytes to send in the body of the :class:`Request`. + :param **kwargs: Optional arguments that ``request`` takes. 
+ """ + + return self.request('put', url, data=data, **kwargs) + + + def patch(self, url, data=None, **kwargs): + """Sends a PATCH request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary or bytes to send in the body of the :class:`Request`. + :param **kwargs: Optional arguments that ``request`` takes. + """ + + return self.request('patch', url, data=data, **kwargs) + + + def delete(self, url, **kwargs): + """Sends a DELETE request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param **kwargs: Optional arguments that ``request`` takes. + """ + + return self.request('delete', url, **kwargs) + + +def session(**kwargs): + """Returns a :class:`Session` for context-management.""" + + return Session(**kwargs) diff --git a/src/archivematicaCommon/lib/externals/requests/status_codes.py b/src/archivematicaCommon/lib/externals/requests/status_codes.py new file mode 100644 index 0000000000..b017b7be53 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/requests/status_codes.py @@ -0,0 +1,86 @@ +# -*- coding: utf-8 -*- + +from .structures import LookupDict + +_codes = { + + # Informational. + 100: ('continue',), + 101: ('switching_protocols',), + 102: ('processing',), + 103: ('checkpoint',), + 122: ('uri_too_long', 'request_uri_too_long'), + 200: ('ok', 'okay', 'all_ok', 'all_okay', 'all_good', '\\o/'), + 201: ('created',), + 202: ('accepted',), + 203: ('non_authoritative_info', 'non_authoritative_information'), + 204: ('no_content',), + 205: ('reset_content', 'reset'), + 206: ('partial_content', 'partial'), + 207: ('multi_status', 'multiple_status', 'multi_stati', 'multiple_stati'), + 208: ('im_used',), + + # Redirection. 
+ 300: ('multiple_choices',), + 301: ('moved_permanently', 'moved', '\\o-'), + 302: ('found',), + 303: ('see_other', 'other'), + 304: ('not_modified',), + 305: ('use_proxy',), + 306: ('switch_proxy',), + 307: ('temporary_redirect', 'temporary_moved', 'temporary'), + 308: ('resume_incomplete', 'resume'), + + # Client Error. + 400: ('bad_request', 'bad'), + 401: ('unauthorized',), + 402: ('payment_required', 'payment'), + 403: ('forbidden',), + 404: ('not_found', '-o-'), + 405: ('method_not_allowed', 'not_allowed'), + 406: ('not_acceptable',), + 407: ('proxy_authentication_required', 'proxy_auth', 'proxy_authentication'), + 408: ('request_timeout', 'timeout'), + 409: ('conflict',), + 410: ('gone',), + 411: ('length_required',), + 412: ('precondition_failed', 'precondition'), + 413: ('request_entity_too_large',), + 414: ('request_uri_too_large',), + 415: ('unsupported_media_type', 'unsupported_media', 'media_type'), + 416: ('requested_range_not_satisfiable', 'requested_range', 'range_not_satisfiable'), + 417: ('expectation_failed',), + 418: ('im_a_teapot', 'teapot', 'i_am_a_teapot'), + 422: ('unprocessable_entity', 'unprocessable'), + 423: ('locked',), + 424: ('failed_dependency', 'dependency'), + 425: ('unordered_collection', 'unordered'), + 426: ('upgrade_required', 'upgrade'), + 428: ('precondition_required', 'precondition'), + 429: ('too_many_requests', 'too_many'), + 431: ('header_fields_too_large', 'fields_too_large'), + 444: ('no_response', 'none'), + 449: ('retry_with', 'retry'), + 450: ('blocked_by_windows_parental_controls', 'parental_controls'), + 499: ('client_closed_request',), + + # Server Error. 
+ 500: ('internal_server_error', 'server_error', '/o\\'), + 501: ('not_implemented',), + 502: ('bad_gateway',), + 503: ('service_unavailable', 'unavailable'), + 504: ('gateway_timeout',), + 505: ('http_version_not_supported', 'http_version'), + 506: ('variant_also_negotiates',), + 507: ('insufficient_storage',), + 509: ('bandwidth_limit_exceeded', 'bandwidth'), + 510: ('not_extended',), +} + +codes = LookupDict(name='status_codes') + +for (code, titles) in _codes.items(): + for title in titles: + setattr(codes, title, code) + if not title.startswith('\\'): + setattr(codes, title.upper(), code) diff --git a/src/archivematicaCommon/lib/externals/requests/structures.py b/src/archivematicaCommon/lib/externals/requests/structures.py new file mode 100644 index 0000000000..7a79ef2c4d --- /dev/null +++ b/src/archivematicaCommon/lib/externals/requests/structures.py @@ -0,0 +1,66 @@ +# -*- coding: utf-8 -*- + +""" +requests.structures +~~~~~~~~~~~~~~~~~~~ + +Data structures that power Requests. + +""" + + +class CaseInsensitiveDict(dict): + """Case-insensitive Dictionary + + For example, ``headers['content-encoding']`` will return the + value of a ``'Content-Encoding'`` response header.""" + + @property + def lower_keys(self): + if not hasattr(self, '_lower_keys') or not self._lower_keys: + self._lower_keys = dict((k.lower(), k) for k in self.iterkeys()) + return self._lower_keys + + def _clear_lower_keys(self): + if hasattr(self, '_lower_keys'): + self._lower_keys.clear() + + def __setitem__(self, key, value): + dict.__setitem__(self, key, value) + self._clear_lower_keys() + + def __delitem__(self, key): + dict.__delitem__(self, key) + self._lower_keys.clear() + + def __contains__(self, key): + return key.lower() in self.lower_keys + + def __getitem__(self, key): + # We allow fall-through here, so values default to None + if key in self: + return dict.__getitem__(self, self.lower_keys[key.lower()]) + + def get(self, key, default=None): + if key in self: + return self[key] + 
else: + return default + +class LookupDict(dict): + """Dictionary lookup object.""" + + def __init__(self, name=None): + self.name = name + super(LookupDict, self).__init__() + + def __repr__(self): + return '' % (self.name) + + def __getitem__(self, key): + # We allow fall-through here, so values default to None + + return self.__dict__.get(key, None) + + def get(self, key, default=None): + return self.__dict__.get(key, default) diff --git a/src/archivematicaCommon/lib/externals/requests/utils.py b/src/archivematicaCommon/lib/externals/requests/utils.py new file mode 100644 index 0000000000..f31cad88d1 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/requests/utils.py @@ -0,0 +1,381 @@ +# -*- coding: utf-8 -*- + +""" +requests.utils +~~~~~~~~~~~~~~ + +This module provides utlity functions that are used within Requests +that are also useful for external consumption. + +""" + +import cgi +import codecs +import cookielib +import os +import random +import re +import zlib +import urllib + +from urllib2 import parse_http_list as _parse_list_header + + +def guess_filename(obj): + """Tries to guess the filename of the given object.""" + name = getattr(obj, 'name', None) + if name and name[0] != '<' and name[-1] != '>': + return name + +# From mitsuhiko/werkzeug (used with permission). +def parse_list_header(value): + """Parse lists as described by RFC 2068 Section 2. + + In particular, parse comma-separated lists where the elements of + the list may include quoted-strings. A quoted-string could + contain a comma. A non-quoted string could have quotes in the + middle. Quotes are removed automatically after parsing. + + It basically works like :func:`parse_set_header` just that items + may appear multiple times and case sensitivity is preserved. + + The return value is a standard :class:`list`: + + >>> parse_list_header('token, "quoted value"') + ['token', 'quoted value'] + + To create a header from the :class:`list` again, use the + :func:`dump_header` function. 
+ + :param value: a string with a list header. + :return: :class:`list` + """ + result = [] + for item in _parse_list_header(value): + if item[:1] == item[-1:] == '"': + item = unquote_header_value(item[1:-1]) + result.append(item) + return result + + +# From mitsuhiko/werkzeug (used with permission). +def parse_dict_header(value): + """Parse lists of key, value pairs as described by RFC 2068 Section 2 and + convert them into a python dict: + + >>> d = parse_dict_header('foo="is a fish", bar="as well"') + >>> type(d) is dict + True + >>> sorted(d.items()) + [('bar', 'as well'), ('foo', 'is a fish')] + + If there is no value for a key it will be `None`: + + >>> parse_dict_header('key_without_value') + {'key_without_value': None} + + To create a header from the :class:`dict` again, use the + :func:`dump_header` function. + + :param value: a string with a dict header. + :return: :class:`dict` + """ + result = {} + for item in _parse_list_header(value): + if '=' not in item: + result[item] = None + continue + name, value = item.split('=', 1) + if value[:1] == value[-1:] == '"': + value = unquote_header_value(value[1:-1]) + result[name] = value + return result + + +# From mitsuhiko/werkzeug (used with permission). +def unquote_header_value(value, is_filename=False): + r"""Unquotes a header value. (Reversal of :func:`quote_header_value`). + This does not use the real unquoting but what browsers are actually + using for quoting. + + :param value: the header value to unquote. + """ + if value and value[0] == value[-1] == '"': + # this is not the real unquoting, but fixing this so that the + # RFC is met will result in bugs with internet explorer and + # probably some other browsers as well. IE for example is + # uploading files with "C:\foo\bar.txt" as filename + value = value[1:-1] + + # if this is a filename and the starting characters look like + # a UNC path, then just return the value without quotes. 
Using the + # replace sequence below on a UNC path has the effect of turning + # the leading double slash into a single slash and then + # _fix_ie_filename() doesn't work correctly. See #458. + if not is_filename or value[:2] != '\\\\': + return value.replace('\\\\', '\\').replace('\\"', '"') + return value + + +def header_expand(headers): + """Returns an HTTP Header value string from a dictionary. + + Example expansion:: + + {'text/x-dvi': {'q': '.8', 'mxb': '100000', 'mxt': '5.0'}, 'text/x-c': {}} + # Accept: text/x-dvi; q=.8; mxb=100000; mxt=5.0, text/x-c + + (('text/x-dvi', {'q': '.8', 'mxb': '100000', 'mxt': '5.0'}), ('text/x-c', {})) + # Accept: text/x-dvi; q=.8; mxb=100000; mxt=5.0, text/x-c + """ + + collector = [] + + if isinstance(headers, dict): + headers = headers.items() + + elif isinstance(headers, basestring): + return headers + + for i, (value, params) in enumerate(headers): + + _params = [] + + for (p_k, p_v) in params.items(): + + _params.append('%s=%s' % (p_k, p_v)) + + collector.append(value) + collector.append('; ') + + if len(params): + + collector.append('; '.join(_params)) + + if not len(headers) == i+1: + collector.append(', ') + + + # Remove trailing separators. + if collector[-1] in (', ', '; '): + del collector[-1] + + return ''.join(collector) + + + +def randombytes(n): + """Return n random bytes.""" + # Use /dev/urandom if it is available. Fall back to random module + # if not. It might be worthwhile to extend this function to use + # other platform-specific mechanisms for getting random bytes. + if os.path.exists("/dev/urandom"): + f = open("/dev/urandom") + s = f.read(n) + f.close() + return s + else: + L = [chr(random.randrange(0, 256)) for i in range(n)] + return "".join(L) + + +def dict_from_cookiejar(cj): + """Returns a key/value dictionary from a CookieJar. + + :param cj: CookieJar object to extract cookies from. 
+ """ + + cookie_dict = {} + + for _, cookies in cj._cookies.items(): + for _, cookies in cookies.items(): + for cookie in cookies.values(): + # print cookie + cookie_dict[cookie.name] = cookie.value + + return cookie_dict + + +def cookiejar_from_dict(cookie_dict): + """Returns a CookieJar from a key/value dictionary. + + :param cookie_dict: Dict of key/values to insert into CookieJar. + """ + + # return cookiejar if one was passed in + if isinstance(cookie_dict, cookielib.CookieJar): + return cookie_dict + + # create cookiejar + cj = cookielib.CookieJar() + + cj = add_dict_to_cookiejar(cj, cookie_dict) + + return cj + + +def add_dict_to_cookiejar(cj, cookie_dict): + """Returns a CookieJar from a key/value dictionary. + + :param cj: CookieJar to insert cookies into. + :param cookie_dict: Dict of key/values to insert into CookieJar. + """ + + for k, v in cookie_dict.items(): + + cookie = cookielib.Cookie( + version=0, + name=k, + value=v, + port=None, + port_specified=False, + domain='', + domain_specified=False, + domain_initial_dot=False, + path='/', + path_specified=True, + secure=False, + expires=None, + discard=True, + comment=None, + comment_url=None, + rest={'HttpOnly': None}, + rfc2109=False + ) + + # add cookie to cookiejar + cj.set_cookie(cookie) + + return cj + + +def get_encodings_from_content(content): + """Returns encodings from given content string. + + :param content: bytestring to extract encodings from. + """ + + charset_re = re.compile(r']', flags=re.I) + + return charset_re.findall(content) + + +def get_encoding_from_headers(headers): + """Returns encodings from given HTTP Header Dict. + + :param headers: dictionary to extract encoding from. + """ + + content_type = headers.get('content-type') + + if not content_type: + return None + + content_type, params = cgi.parse_header(content_type) + + if 'charset' in params: + return params['charset'].strip("'\"") + + +def unicode_from_html(content): + """Attempts to decode an HTML string into unicode. 
+ If unsuccessful, the original content is returned. + """ + + encodings = get_encodings_from_content(content) + + for encoding in encodings: + + try: + return unicode(content, encoding) + except (UnicodeError, TypeError): + pass + + return content + + +def stream_decode_response_unicode(iterator, r): + """Stream decodes a iterator.""" + + if r.encoding is None: + for item in iterator: + yield item + return + + decoder = codecs.getincrementaldecoder(r.encoding)(errors='replace') + for chunk in iterator: + rv = decoder.decode(chunk) + if rv: + yield rv + rv = decoder.decode('', final=True) + if rv: + yield rv + + +def get_unicode_from_response(r): + """Returns the requested content back in unicode. + + :param r: Response object to get unicode content from. + + Tried: + + 1. charset from content-type + + 2. every encodings from ```` + + 3. fall back and replace all unicode characters + + """ + + tried_encodings = [] + + # Try charset from content-type + encoding = get_encoding_from_headers(r.headers) + + if encoding: + try: + return unicode(r.content, encoding) + except UnicodeError: + tried_encodings.append(encoding) + + # Fall back: + try: + return unicode(r.content, encoding, errors='replace') + except TypeError: + return r.content + + +def decode_gzip(content): + """Return gzip-decoded string. + + :param content: bytestring to gzip-decode. + """ + + return zlib.decompress(content, 16 + zlib.MAX_WBITS) + + +def stream_decode_gzip(iterator): + """Stream decodes a gzip-encoded iterator""" + try: + dec = zlib.decompressobj(16 + zlib.MAX_WBITS) + for chunk in iterator: + rv = dec.decompress(chunk) + if rv: + yield rv + buf = dec.decompress('') + rv = buf + dec.flush() + if rv: + yield rv + except zlib.error: + pass + + +def requote_path(path): + """Re-quote the given URL path component. + + This function passes the given path through an unquote/quote cycle to + ensure that it is fully and consistenty quoted. 
+ """ + parts = path.split("/") + parts = (urllib.quote(urllib.unquote(part), safe="") for part in parts) + return "/".join(parts) diff --git a/src/archivematicaCommon/lib/externals/singleInstance.py b/src/archivematicaCommon/lib/externals/singleInstance.py new file mode 100755 index 0000000000..3bc1878f9d --- /dev/null +++ b/src/archivematicaCommon/lib/externals/singleInstance.py @@ -0,0 +1,102 @@ +#!/usr/bin/python -OO +#Author Larry Bates http://code.activestate.com/recipes/users/651848/ +#Source {{{ http://code.activestate.com/recipes/546512/ (r1) +#license: PSF http://docs.python.org/license.html + +#Modified for archivematica - added Kill method + +import commands +import os +import sys +import time + +class singleinstance(object): + ''' + singleinstance - based on Windows version by Dragan Jovelic this is a Linux + version that accomplishes the same task: make sure that + only a single instance of an application is running. + + ''' + + def __init__(self, pidPath): + ''' + pidPath - full path/filename where pid for running application is to be + stored. Often this is ./var/.pid + ''' + self.pidPath=pidPath + # + # See if pidFile exists + # + if os.path.exists(pidPath): + # + # Make sure it is not a "stale" pidFile + # + pidFile=open(pidPath, 'r') + pid = pidFile.read().strip() + self.pid = pid + pidFile.close() + # + # Check list of running pids, if not running it is stale so + # overwrite + # + pidRunning=commands.getoutput('ls /proc | grep %s' % pid) + if pidRunning: + self.lasterror=True + + else: + self.lasterror=False + + else: + self.lasterror=False + + if not self.lasterror: + # + # Write my pid into pidFile to keep multiple copies of program from + # running. 
+ # + fp=open(pidPath, 'w') + pid = str(os.getpid()) + self.pid = pid + fp.write(pid) + fp.close() + + def alreadyrunning(self): + return self.lasterror + + #def __del__(self): + # if not self.lasterror: + # os.unlink(self.pidPath) + + def kill(self,level=9, timeToSleep=2): + if self.pid == str(os.getpid()): + print >>sys.stderr, "Killing self" + try: + os.kill(int(self.pid), level) + time.sleep(timeToSleep) + except OSError: + pidRunning = False + self.__init__(self.pidPath) + if self.pid != str(os.getpid()): + print self.pid + print self.pid, "is not", str(os.getpid()) + time.sleep(timeToSleep) + self.kill(level, timeToSleep) + + + +if __name__ == "__main__": + # + # do this at beginnig of your application + # + myapp = singleinstance() + # + # check is another instance of same program running + # + if myapp.alreadyrunning(): + sys.exit("Another instance of this program is already running") + + # + # not running, safe to continue... + # + print "No another instance is running, can continue here" +## end of http://code.activestate.com/recipes/546512/ }}} diff --git a/src/archivematicaCommon/lib/externals/xmltodict.py b/src/archivematicaCommon/lib/externals/xmltodict.py new file mode 100755 index 0000000000..0fe038a812 --- /dev/null +++ b/src/archivematicaCommon/lib/externals/xmltodict.py @@ -0,0 +1,158 @@ +#!/usr/bin/env python +import xml.parsers.expat + +__author__ = 'Martin Blech' +__version__ = '0.1.dev' +__license__ = 'MIT' + +class ParsingInterrupted(Exception): pass + +class DictSAXHandler: + def __init__(self, + item_depth=0, + xml_attribs=True, + item_callback=lambda *args: True, + attr_prefix='@', + cdata_key='#text', + force_cdata=False): + self.path = [] + self.stack = [] + self.data = None + self.item = None + self.item_depth = item_depth + self.xml_attribs = xml_attribs + self.item_callback = item_callback + self.attr_prefix = attr_prefix; + self.cdata_key = cdata_key + self.force_cdata = force_cdata + + def startElement(self, name, attrs): + 
self.path.append((name, attrs or None)) + if len(self.path) > self.item_depth: + self.stack.append((self.item, self.data)) + attrs = dict((self.attr_prefix+key, value) + for (key, value) in attrs.items()) + self.item = self.xml_attribs and attrs or None + self.data = None + + def endElement(self, name): + if len(self.path) == self.item_depth: + item = self.item + if item is None: + item = self.data + should_continue = self.item_callback(self.path, item) + if not should_continue: + raise ParsingInterrupted() + if len(self.stack): + item, data = self.item, self.data + self.item, self.data = self.stack.pop() + if self.force_cdata and item is None: + item = {} + if item is not None: + if data: + item[self.cdata_key] = data + self.push_data(name, item) + else: + self.push_data(name, data) + else: + self.item = self.data = None + self.path.pop() + + def characters(self, data): + if data.strip(): + if not self.data: + self.data = data + else: + self.data += data + + def push_data(self, key, data): + if self.item is None: + self.item = {} + try: + value = self.item[key] + if isinstance(value, list): + value.append(data) + else: + self.item[key] = [value, data] + except KeyError: + self.item[key] = data + +def parse(xml_input, *args, **kwargs): + """Parse the given XML input and convert it into a dictionary. + + `xml_input` can either be a `string` or a file-like object. + + If `xml_attribs` is `True`, element attributes are put in the dictionary + among regular child elements, using `@` as a prefix to avoid collisions. If + set to `False`, they are just ignored. + + Simple example:: + + >>> doc = xmltodict.parse(\"\"\" + ... + ... 1 + ... 2 + ... + ... \"\"\") + >>> doc['a']['@prop'] + u'x' + >>> doc['a']['b'] + [u'1', u'2'] + + If `item_depth` is `0`, the function returns a dictionary for the root + element (default behavior). Otherwise, it calls `item_callback` every time + an item at the specified depth is found and returns `None` in the end + (streaming mode). 
+ + The callback function receives two parameters: the `path` from the document + root to the item (name-attribs pairs), and the `item` (dict). If the + callback's return value is false-ish, parsing will be stopped with the + :class:`ParsingInterrupted` exception. + + Streaming example:: + + >>> def handle(path, item): + ... print 'path:%s item:%s' % (path, item) + ... return True + ... + >>> xmltodict.parse(\"\"\" + ... + ... 1 + ... 2 + ... \"\"\", item_depth=2, item_callback=handle) + path:[(u'a', {u'prop': u'x'}), (u'b', None)] item:1 + path:[(u'a', {u'prop': u'x'}), (u'b', None)] item:2 + + """ + handler = DictSAXHandler(*args, **kwargs) + parser = xml.parsers.expat.ParserCreate() + parser.StartElementHandler = handler.startElement + parser.EndElementHandler = handler.endElement + parser.CharacterDataHandler = handler.characters + if hasattr(xml_input, 'read'): + parser.ParseFile(xml_input) + else: + parser.Parse(xml_input, True) + return handler.item + +if __name__ == '__main__': + import sys + import marshal + + (item_depth,) = sys.argv[1:] + item_depth = int(item_depth) + + def handle_item(item_type, item): + marshal.dump((item_type, item), sys.stdout) + return True + + try: + root = parse(sys.stdin, + item_depth=item_depth, + item_callback=handle_item) + if item_depth == 0: + handle_item([], root) + except KeyboardInterrupt: + pass + except IOError, e: + print e diff --git a/src/archivematicaCommon/lib/fileOperations.py b/src/archivematicaCommon/lib/fileOperations.py new file mode 100644 index 0000000000..c2e92e554a --- /dev/null +++ b/src/archivematicaCommon/lib/fileOperations.py @@ -0,0 +1,243 @@ +#!/usr/bin/python -OO +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. 
+# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . + +# @package Archivematica +# @subpackage archivematicaCommon +# @author Joseph Perry +# @version svn: $Id$ + +import os +import uuid +import sys +import databaseInterface +import shutil +from databaseFunctions import insertIntoFiles +from executeOrRunSubProcess import executeOrRun +from externals.checksummingTools import sha_for_file +from databaseFunctions import insertIntoEvents +import MySQLdb +from archivematicaFunctions import unicodeToStr + +def updateSizeAndChecksum(fileUUID, filePath, date, eventIdentifierUUID): + fileSize = os.path.getsize(filePath).__str__() + checksum = sha_for_file(filePath).__str__() + + sql = "UPDATE Files " + \ + "SET fileSize='" + fileSize +"', checksum='" + checksum + "' " + \ + "WHERE fileUUID='" + fileUUID + "'" + databaseInterface.runSQL(sql) + + insertIntoEvents(fileUUID=fileUUID, \ + eventIdentifierUUID=eventIdentifierUUID, \ + eventType="message digest calculation", \ + eventDateTime=date, \ + eventDetail="program=\"python\"; module=\"hashlib.sha256()\"", \ + eventOutcomeDetailNote=checksum) + + +def addFileToTransfer(filePathRelativeToSIP, fileUUID, transferUUID, taskUUID, date, sourceType="ingestion", eventDetail="", use="original"): + print filePathRelativeToSIP, fileUUID, transferUUID, taskUUID, date, sourceType, eventDetail, use + insertIntoFiles(fileUUID, filePathRelativeToSIP, date, 
transferUUID=transferUUID, use=use) + insertIntoEvents(fileUUID=fileUUID, \ + eventIdentifierUUID=taskUUID, \ + eventType=sourceType, \ + eventDateTime=date, \ + eventDetail=eventDetail, \ + eventOutcome="", \ + eventOutcomeDetailNote="") + +def addFileToSIP(filePathRelativeToSIP, fileUUID, sipUUID, taskUUID, date, sourceType="ingestion", use="original"): + insertIntoFiles(fileUUID, filePathRelativeToSIP, date, sipUUID=sipUUID, use=use) + insertIntoEvents(fileUUID=fileUUID, \ + eventIdentifierUUID=taskUUID, \ + eventType=sourceType, \ + eventDateTime=date, \ + eventDetail="", \ + eventOutcome="", \ + eventOutcomeDetailNote="") + +#Used to write to file +#@output - the text to append to the file +#@fileName - The name of the file to create, or append to. +#@returns - 0 if ok, non zero if error occured. +def writeToFile(output, fileName, writeWhite=False): + #print fileName + if not writeWhite and output.isspace(): + return 0 + if fileName and output: + #print "writing to: " + fileName + if fileName.startswith("<^Not allowed to write to file^> "): + return -1 + try: + f = open(fileName, 'a') + f.write(output.__str__()) + f.close() + os.chmod(fileName, 488) + except OSError, ose: + print >>sys.stderr, "output Error", ose + return -2 + except IOError as (errno, strerror): + print "I/O error({0}): {1}".format(errno, strerror) + return -3 + else: + print "No output, or file specified" + return 0 + +def checksumFile(filePath, fileUUID): + global transferDirectory + truePath = filePath.replace("transfer/", transferDirectory, 1) + checksum = sha_for_file(truePath) + utcDate = databaseInterface.getUTCDate() + + #Create Event + eventIdentifierUUID = uuid.uuid4().__str__() + eventType = "message digest calculation" + eventDateTime = utcDate + eventDetail = 'program="python"; module="hashlib.sha256()" ; file="/usr/lib/python2.6/hashlib.pyc"' + eventOutcome = "" + eventOutcomeDetailNote = checksum.__str__() + + databaseInterface.insertIntoEvents(fileUUID=fileUUID, \ + 
eventIdentifierUUID=eventIdentifierUUID, \ + eventType=eventType, \ + eventDateTime=eventDateTime, \ + eventDetail=eventDetail, \ + eventOutcome=eventOutcome, \ + eventOutcomeDetailNote=eventOutcomeDetailNote) + +def removeFileByFileUUID(fileUUID, utcDate = databaseInterface.getUTCDate()): + databaseInterface.runSQL("UPDATE Files " + \ + "SET removedTime='" + utcDate + "', currentLocation=NULL " + \ + "WHERE fileUUID='" + fileUUID + "'" ) + +def removeFile(filePath, utcDate = databaseInterface.getUTCDate()): + global separator + print "removing: ", filePath + filesWithMatchingPath = [] + + sqlLoggingLock.acquire() + #Find the file pk/UUID + c=MCPloggingSQL.database.cursor() + sql = """SELECT fileUUID FROM Files WHERE removedTime = 0 AND Files.currentLocation = '""" + MySQLdb.escape_string(filePath) + """';""" + c.execute(sql) + row = c.fetchone() + while row != None: + filesWithMatchingPath.append(row[0]) + row = c.fetchone() + sqlLoggingLock.release() + #Update the database + for file in filesWithMatchingPath: + eventIdentifierUUID = uuid.uuid4().__str__() + eventType = "file removed" + eventDateTime = utcDate + eventDetail = "" + eventOutcomeDetailNote = "removed from: " + filePath + + databaseInterface.insertIntoEvents(fileUUID=fileUUID, \ + eventIdentifierUUID=eventIdentifierUUID, \ + eventType=eventType, \ + eventDateTime=eventDateTime, \ + eventDetail=eventDetail, \ + eventOutcome=eventOutcome, \ + eventOutcomeDetailNote=eventOutcomeDetailNote) + removeFileByFileUUID(fileUUID, utcDate) + +def renameAsSudo(source, destination): + """Used to move/rename Directories that the archivematica user may or may not have writes to move""" + command = "sudo mv \"" + source + "\" \"" + destination + "\"" + if isinstance(command, unicode): + command = command.encode("utf-8") + exitCode, stdOut, stdError = executeOrRun("command", command, "", printing=False) + if exitCode: + print >>sys.stderr, "exitCode:", exitCode + print >>sys.stderr, stdOut + print >>sys.stderr, 
stdError + exit(exitCode) + + +def updateDirectoryLocation(src, dst, unitPath, unitIdentifier, unitIdentifierType, unitPathReplaceWith): + srcDB = src.replace(unitPath, unitPathReplaceWith) + if not srcDB.endswith("/") and srcDB != unitPathReplaceWith: + srcDB += "/" + dstDB = dst.replace(unitPath, unitPathReplaceWith) + if not dstDB.endswith("/") and dstDB != unitPathReplaceWith: + dstDB += "/" + sql = "SELECT Files.fileUUID, Files.currentLocation FROM Files WHERE removedTime = 0 AND Files.currentLocation LIKE '" + MySQLdb.escape_string(srcDB) + "%' AND " + unitIdentifierType + " = '" + unitIdentifier + "';" + rows = databaseInterface.queryAllSQL(sql) + for row in rows: + fileUUID = row[0] + location = row[1] + destDB = location.replace(srcDB, dstDB) + sql = """UPDATE Files SET currentLocation='%s' WHERE fileUUID='%s';""" % (MySQLdb.escape_string(destDB), fileUUID) + databaseInterface.runSQL(sql) + if os.path.isdir(dst): + if dst.endswith("/"): + dst += "." + else: + dst += "/." + print "moving: ", src, dst + shutil.move(src, dst) + +def updateFileLocation2(src, dst, unitPath, unitIdentifier, unitIdentifierType, unitPathReplaceWith): + """Dest needs to be the actual full destination path with filename.""" + srcDB = src.replace(unitPath, unitPathReplaceWith) + dstDB = dst.replace(unitPath, unitPathReplaceWith) + sql = "SELECT Files.fileUUID, Files.currentLocation FROM Files WHERE removedTime = 0 AND Files.currentLocation = '" + MySQLdb.escape_string(srcDB) + "' AND " + unitIdentifierType + " = '" + unitIdentifier + "';" + rows = databaseInterface.queryAllSQL(sql) + if len(rows) != 1: + print sys.stderr, len(rows), "rows", sql, rows + exit(4) + for row in rows: + fileUUID = row[0] + location = row[1] + sql = """UPDATE Files SET currentLocation='%s' WHERE fileUUID='%s';""" % (MySQLdb.escape_string(dstDB), fileUUID) + databaseInterface.runSQL(sql) + print "moving: ", src, dst + shutil.move(src, dst) + +#import lxml.etree as etree +def updateFileLocation(src, dst, 
eventType, eventDateTime, eventDetail, eventIdentifierUUID = uuid.uuid4().__str__(), fileUUID="None", sipUUID = None, transferUUID=None, eventOutcomeDetailNote = ""): + """If the file uuid is not provided, will use the sip uuid and old path to find the file uuid""" + src = unicodeToStr(src) + dst = unicodeToStr(dst) + fileUUID = unicodeToStr(fileUUID) + if not fileUUID or fileUUID == "None": + sql = "Need to define transferUUID or sipUUID" + if sipUUID: + sql = "SELECT Files.fileUUID FROM Files WHERE removedTime = 0 AND Files.currentLocation = '" + MySQLdb.escape_string(src) + "' AND Files.sipUUID = '" + sipUUID + "';" + elif transferUUID: + sql = "SELECT Files.fileUUID FROM Files WHERE removedTime = 0 AND Files.currentLocation = '" + MySQLdb.escape_string(src) + "' AND Files.transferUUID = '" + transferUUID + "';" + c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + while row != None: + fileUUID = unicodeToStr(row[0]) + row = c.fetchone() + sqlLock.release() + + if eventOutcomeDetailNote == "": + eventOutcomeDetailNote = "Original name=\"%s\"; cleaned up name=\"%s\"" %(src, dst) + #eventOutcomeDetailNote = eventOutcomeDetailNote.decode('utf-8') + #CREATE THE EVENT + if not fileUUID: + print >>sys.stderr, "Unable to find file uuid for: ", src, " -> ", dst + exit(6) + insertIntoEvents(fileUUID=fileUUID, eventIdentifierUUID=eventIdentifierUUID, eventType=eventType, eventDateTime=eventDateTime, eventDetail=eventDetail, eventOutcome="", eventOutcomeDetailNote=eventOutcomeDetailNote) + + #UPDATE THE CURRENT FILE PATH + sql = """UPDATE Files SET currentLocation='%s' WHERE fileUUID='%s';""" % (MySQLdb.escape_string(dst), fileUUID) + databaseInterface.runSQL(sql) diff --git a/src/archivematicaCommon/lib/lxmletreenshelper.py b/src/archivematicaCommon/lib/lxmletreenshelper.py new file mode 100755 index 0000000000..63c3c72d3a --- /dev/null +++ b/src/archivematicaCommon/lib/lxmletreenshelper.py @@ -0,0 +1,39 @@ +#!/usr/bin/python -OO + +# This file is part of 
Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . + +# @package Archivematica +# @subpackage archivematicaCommon +# @author Joseph Perry +# @version svn: $Id$ +import os +import sys +requiredDirectories = ["objects", \ + "logs", \ + "metadata",\ + "metadata/submissionDocumentation"] + +def createStructuredDirectory(SIPDir): + for directory in requiredDirectories: + path = os.path.join(SIPDir, directory) + if not os.path.isdir(path): + os.makedirs(path) + +if __name__ == '__main__': + SIPDir = sys.argv[1] + createStructuredDirectory(SIPDir) diff --git a/src/archivematicaCommon/lib/playAudioFileInCVLC.py b/src/archivematicaCommon/lib/playAudioFileInCVLC.py new file mode 100755 index 0000000000..1683a45218 --- /dev/null +++ b/src/archivematicaCommon/lib/playAudioFileInCVLC.py @@ -0,0 +1,45 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . + +# @package Archivematica +# @subpackage archivematicaCommon +# @author Joseph Perry +# @version svn: $Id$ + +import sys +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +from executeOrRunSubProcess import executeOrRun + +def playAudioFile(filePath): + command = "cvlc --play-and-exit \"" + filePath + "\"" + exitCode, stdOut, stdError = executeOrRun("command", command, printing=False) + if exitCode != 0: + print stdOut + print >>sys.stderr, stdError + +def playAudioFileInThread(filePath): + import threading + t = threading.Thread(target=playAudioFile, args=(filePath,)) + t.daemon = True + t.start() + +if __name__ == '__main__': + filePath = sys.argv[1] + playAudioFile(filePath) + playAudioFileInThread(filePath) diff --git a/src/archivematicaCommon/lib/sharedVariablesAcrossModules.py b/src/archivematicaCommon/lib/sharedVariablesAcrossModules.py new file mode 100755 index 0000000000..1af4d05f82 --- /dev/null +++ b/src/archivematicaCommon/lib/sharedVariablesAcrossModules.py @@ -0,0 +1,28 @@ +#!/usr/bin/python -OO + +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . + +# @package Archivematica +# @subpackage archivematicaCommon +# @author Joseph Perry +# @version svn: $Id$ + +class SharedVariablesAcrossModules: + "used to share variables across multiple files" + +sharedVariablesAcrossModules = SharedVariablesAcrossModules() diff --git a/src/archivematicaCommon/lib/utilities/rebuild-elasticsearch-aip-index-from-files b/src/archivematicaCommon/lib/utilities/rebuild-elasticsearch-aip-index-from-files new file mode 100755 index 0000000000..916cf81a81 --- /dev/null +++ b/src/archivematicaCommon/lib/utilities/rebuild-elasticsearch-aip-index-from-files @@ -0,0 +1,69 @@ +#!/usr/bin/env python + +import os, sys, subprocess, tempfile, shutil, ConfigParser +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +import elasticSearchFunctions, databaseInterface +sys.path.append("/usr/lib/archivematica/archivematicaCommon/externals") +import pyes + +# Determine root of shared directories +clientConfigFilePath = '/etc/archivematica/MCPClient/clientConfig.conf' +config = ConfigParser.SafeConfigParser() +config.read(clientConfigFilePath) + +try: + sharedDirectory = config.get('MCPClient', "sharedDirectoryMounted") +except: + print "Configuration item 'sharedDirectoryMounted' not available at /etc/archivematica/MCPClient/clientConfig.conf." + quit(1) + +# Clear database backups of indexed AIPs +sql = "DELETE FROM ElasticsearchIndexBackup WHERE indexName='aips' AND typeName='aip'" +databaseInterface.runSQL(sql) + +rootdir = os.path.join(sharedDirectory, 'www/AIPsStore') + +print "Rebuilding AIPS index from AIPS in " + rootdir + "..." + +def processAIP(path, temp_dir): + archive_file = os.path.basename(path) + subdir = archive_file[:-3] + uuid = archive_file[-39:-3] + mets_file_relative_path = subdir + "/data/METS." 
+ uuid + ".xml" + + print 'Processing AIP ' + uuid + '...' + + command_data = [ + '7za', + 'e', + '-o' + temp_dir, + path, + mets_file_relative_path + ] + + subprocess.call(command_data) + + elasticSearchFunctions.connect_and_index('aips', 'aip', uuid, temp_dir) + +conn = pyes.ES('127.0.0.1:9200') + +try: + conn.delete_index('aips') +except: + print "Error deleting index or index already deleted." + +temp_dir = tempfile.mkdtemp() + +for root, subFolders, files in os.walk(rootdir): + for file in files: + try: + file.index('.7z') + processAIP(os.path.join(root, file), temp_dir) + except: + pass + +print "Cleaning up..." + +shutil.rmtree(temp_dir) + +print "Indexing complete." diff --git a/src/archivematicaCommon/lib/utilities/rebuild-elasticsearch-index-from-database b/src/archivematicaCommon/lib/utilities/rebuild-elasticsearch-index-from-database new file mode 100755 index 0000000000..ee82b61320 --- /dev/null +++ b/src/archivematicaCommon/lib/utilities/rebuild-elasticsearch-index-from-database @@ -0,0 +1,59 @@ +#!/usr/bin/env python + +import sys, MySQLdb, cPickle, base64 +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +import databaseInterface +sys.path.append("/usr/lib/archivematica/archivematicaCommon/externals") +import pyes + +def index_documents_from_database(index, type): + indexed = 0 + + sql = "SELECT docId, data FROM ElasticsearchIndexBackup WHERE indexName='%s' AND typeName='%s'" % (index, type) + c, sqlLock = databaseInterface.querySQL(sql) + row = c.fetchone() + while row != None: + sys.stdout.write('.') + docId = row[0].__str__() + data = cPickle.loads(base64.decodestring(str(row[1]))) + result = conn.index(data, index, type, docId) + indexed = indexed + 1 + row = c.fetchone() + sqlLock.release() + + return indexed + +# establish allows index names and document types +indexTypes = { + 'transfers': 'transfer', + 'aips': 'aip' +} + +# make sure command-line usage and specified index name are valid +if len(sys.argv) != 2: + print 'Usage: %s 
' % (sys.argv[0]) + sys.exit(1) +else: + index = sys.argv[1] + if index not in indexTypes: + print 'Index name must be "aips" or "transfers".' + sys.exit(1) + else: + type = indexTypes[index] + +conn = pyes.ES('127.0.0.1:9200') + +# delete index if it exists +try: + conn.delete_index(index) +except: + print "Error deleting %s index or index already deleted." % (index) + +# create new index +conn.create_index(index) + +print 'Indexing data.' + +indexed = index_documents_from_database(index, type) +print +print '%d documents indexed.' % (indexed) diff --git a/src/dashboard/debian/archivematica-dashboard.install b/src/dashboard/debian/archivematica-dashboard.install new file mode 100644 index 0000000000..fe4ffc6242 --- /dev/null +++ b/src/dashboard/debian/archivematica-dashboard.install @@ -0,0 +1,2 @@ +src/* /usr/share/archivematica/dashboard/ + diff --git a/src/dashboard/debian/control b/src/dashboard/debian/control new file mode 100644 index 0000000000..bf6210ae41 --- /dev/null +++ b/src/dashboard/debian/control @@ -0,0 +1,14 @@ +Source: archivematica-dashboard +Section: utils +Priority: extra +Maintainer: Austin Trask +Build-Depends: debhelper (>= 7) +Standards-Version: 3.8.3 +Homepage: http://archivematica.org + +Package: archivematica-dashboard +Architecture: any +Depends: ${shlibs:Depends}, ${misc:Depends}, libapache2-mod-wsgi, apache2-mpm-prefork, python-django, python-django-doc, python-gearman, php5, php5-xsl, php5-mysql, php5-cli +Description: Web Dashboard for Archivematica + Web based dashboard interface + diff --git a/src/dashboard/debian/copyright b/src/dashboard/debian/copyright new file mode 100644 index 0000000000..3906e99c09 --- /dev/null +++ b/src/dashboard/debian/copyright @@ -0,0 +1,37 @@ +This work was packaged for Ubuntu by: + + Austin Trask + +It was downloaded from http://archivematica.org + +Upstream Author(s): + + Joseph Perry + Jesus Garcia Crespo + Austin Trask + Peter Van Garderen + Evelyn McLellan + +Copyright: + + Copyright (C) 
2010-2012 Artefactual Systems Inc. + +License: + + This is free software: you can redistribute it and/or modify + it under the terms of the GNU Affero General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This software is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this software. If not, see . + + +The Debian packaging is: + + Copyright (C) 2010-2012 Artefactual Systems Inc. diff --git a/src/dashboard/debian/postinst b/src/dashboard/debian/postinst new file mode 100755 index 0000000000..329a29d9f9 --- /dev/null +++ b/src/dashboard/debian/postinst @@ -0,0 +1,4 @@ +#!/bin/sh + +a2enmod wsgi +/etc/init.d/apache2 restart diff --git a/src/dashboard/debian/rules b/src/dashboard/debian/rules new file mode 100755 index 0000000000..917d9bf25d --- /dev/null +++ b/src/dashboard/debian/rules @@ -0,0 +1,13 @@ +#!/usr/bin/make -f +# -*- makefile -*- +# Sample debian/rules that uses debhelper. +# This file was originally written by Joey Hess and Craig Small. +# As a special exception, when this file is copied by dh-make into a +# dh-make output file, you may use that output file without restriction. +# This special exception was added by Craig Small in version 0.37 of dh-make. + +# Uncomment this to turn on verbose mode. 
+#export DH_VERBOSE=1 + +%: + dh $@ diff --git a/src/dashboard/src/__init__.py b/src/dashboard/src/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/dashboard/src/apache/django.wsgi b/src/dashboard/src/apache/django.wsgi new file mode 100644 index 0000000000..600bfdfc77 --- /dev/null +++ b/src/dashboard/src/apache/django.wsgi @@ -0,0 +1,17 @@ +import os, sys +import django.core.handlers.wsgi + +# Ensure that the path does not get added multiple times +path = '/usr/share/archivematica/dashboard' +if path not in sys.path: + sys.path.append(path) + +os.environ['DJANGO_SETTINGS_MODULE'] = 'settings' + +application = django.core.handlers.wsgi.WSGIHandler() + +# See http://blog.dscpl.com.au/2008/12/using-modwsgi-when-developing-django.html +from django.conf import settings +if settings.DEBUG: + import monitor + monitor.start(interval=1.0) diff --git a/src/dashboard/src/components/__init__.py b/src/dashboard/src/components/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/dashboard/src/components/accounts/__init__.py b/src/dashboard/src/components/accounts/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/dashboard/src/components/accounts/forms.py b/src/dashboard/src/components/accounts/forms.py new file mode 100644 index 0000000000..28e88d052e --- /dev/null +++ b/src/dashboard/src/components/accounts/forms.py @@ -0,0 +1,41 @@ +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . + +from django import forms +from django.contrib.auth.models import User +from django.contrib.auth.forms import UserChangeForm + +class UserChangeForm(UserChangeForm): + email = forms.EmailField(required=True) + + class Meta: + model = User + fields = ('username', 'first_name', 'last_name', 'email', 'is_active', 'is_superuser') + + def __init__(self, *args, **kwargs): + super(UserChangeForm, self).__init__(*args, **kwargs) + ## Hide fields when there is only one superuser + if 1 == User.objects.filter(is_superuser=True).count(): + del self.fields['is_active'] + del self.fields['is_superuser'] + + + def save(self, commit=True): + user = super(UserChangeForm, self).save(commit=False) + if commit: + user.save() + return user diff --git a/src/dashboard/src/components/accounts/urls.py b/src/dashboard/src/components/accounts/urls.py new file mode 100644 index 0000000000..b1ea0aa57d --- /dev/null +++ b/src/dashboard/src/components/accounts/urls.py @@ -0,0 +1,32 @@ +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . 
+ +from django.conf.urls.defaults import patterns + +urlpatterns = patterns('components.accounts.views', + (r'^$', 'list'), + (r'add/$', 'add'), + (r'(?P\d+)/delete/$', 'delete'), + (r'(?P\d+)/edit/$', 'edit'), + (r'profile/$', 'edit'), + (r'list/$', 'list') +) + +urlpatterns += patterns('', + (r'login/$', 'django.contrib.auth.views.login', { 'template_name': 'accounts/login.html' }), + (r'logout/$', 'django.contrib.auth.views.logout_then_login') +) diff --git a/src/dashboard/src/components/accounts/views.py b/src/dashboard/src/components/accounts/views.py new file mode 100644 index 0000000000..7c1db57d6d --- /dev/null +++ b/src/dashboard/src/components/accounts/views.py @@ -0,0 +1,89 @@ +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . 
+ +from django.contrib.auth.decorators import user_passes_test +from django.contrib.auth.forms import UserCreationForm +from django.contrib.auth.models import User +from django.core.urlresolvers import reverse +from django.http import Http404, HttpResponse, HttpResponseBadRequest, HttpResponseRedirect +from django.shortcuts import render +from django.views.decorators.csrf import csrf_protect + +from components.accounts.forms import UserChangeForm + +@user_passes_test(lambda u: u.is_superuser, login_url='/forbidden/') +def list(request): + users = User.objects.all() + return render(request, 'accounts/list.html', locals()) + +@user_passes_test(lambda u: u.is_superuser, login_url='/forbidden/') +def add(request): + if request.method == 'POST': + form = UserCreationForm(request.POST) + if form.is_valid(): + user = form.save(commit=False) + user.is_staff = True + user.save() + return HttpResponseRedirect(reverse('components.accounts.views.list')) + else: + form = UserCreationForm() + + return render(request, 'accounts/add.html', {'form': form }) + +def edit(request, id=None): + # Security check + if request.user.id != id: + if request.user.is_superuser is False: + return HttpResponseRedirect(reverse('main.views.forbidden')) + # Load user + if id is None: + user = request.user + title = 'Edit your profile (%s)' % user + else: + try: + user = User.objects.get(pk=id) + title = 'Edit user %s' % user + except: + raise Http404 + # Form + if request.method == 'POST': + form = UserChangeForm(request.POST, instance=user) + if form.is_valid(): + user = form.save(commit=False) + user.save() + return HttpResponseRedirect(reverse('components.accounts.views.list')) + else: + form = UserChangeForm(instance=user) + return render(request, 'accounts/edit.html', {'form': form, 'user': user, 'title': title }) + +def delete(request, id): + # Security check + if request.user.id != id: + if request.user.is_superuser is False: + return HttpResponseRedirect(reverse('main.views.forbidden')) + # 
Avoid removing the last user + if 1 == User.objects.count(): + return HttpResponseRedirect(reverse('main.views.forbidden')) + # Delete + try: + user = User.objects.get(pk=id) + if request.user.username == user.username: + raise Http404 + user.delete() + return HttpResponseRedirect(reverse('components.accounts.views.list')) + except: + raise Http404 diff --git a/src/dashboard/src/components/archival_storage/__init__.py b/src/dashboard/src/components/archival_storage/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/dashboard/src/components/archival_storage/forms.py b/src/dashboard/src/components/archival_storage/forms.py new file mode 100644 index 0000000000..994eab67f0 --- /dev/null +++ b/src/dashboard/src/components/archival_storage/forms.py @@ -0,0 +1,7 @@ +from django import forms +from django.forms.widgets import TextInput, Textarea + +INPUT_ATTRS = {'class': 'span11'} + +class StorageSearchForm(forms.Form): + query = forms.CharField(label='', required=False, widget=TextInput(attrs=INPUT_ATTRS)) diff --git a/src/dashboard/src/components/archival_storage/urls.py b/src/dashboard/src/components/archival_storage/urls.py new file mode 100644 index 0000000000..118c3b7d11 --- /dev/null +++ b/src/dashboard/src/components/archival_storage/urls.py @@ -0,0 +1,9 @@ +from django.conf.urls.defaults import patterns + +urlpatterns = patterns('components.archival_storage.views', + (r'page/(?P\d+)/$', 'archival_storage_page'), + (r'search/json/file/(?P\w+)/$', 'archival_storage_file_json'), + (r'search/$', 'archival_storage_search'), + (r'(?PAIPsStore/[0-9a-z]{4}/[0-9a-z]{3}/[0-9a-z]{4}/[0-9a-z]{4}/[0-9a-z]{4}/[0-9a-z]{4}/[0-9a-z]{4}/.*\.(7z|zip))/$', 'archival_storage_sip_download'), + (r'$', 'archival_storage') +) diff --git a/src/dashboard/src/components/archival_storage/views.py b/src/dashboard/src/components/archival_storage/views.py new file mode 100644 index 0000000000..e067d9bd52 --- /dev/null +++ 
b/src/dashboard/src/components/archival_storage/views.py @@ -0,0 +1,195 @@ +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . + +from django.core.paginator import Paginator, InvalidPage, EmptyPage +from django.shortcuts import render +from django.http import HttpResponse +from django.utils import simplejson +from components.archival_storage import forms +from main import models +from main import filesystem +import os +import sys +sys.path.append("/usr/lib/archivematica/archivematicaCommon/externals") +import pyes +import httplib + +AIPSTOREPATH = '/var/archivematica/sharedDirectory/www/AIPsStore' + +def archival_storage(request): + return archival_storage_sip_display(request) + +def archival_storage_page(request, page=None): + return archival_storage_sip_display(request, page) + +def archival_storage_search(request): + query = request.GET.get('query', '') + + if query == '': + query = '*' + + # set pagination-related variables + items_per_page = 20 + + page = request.GET.get('page', 0) + if page == '': + page = 0 + page = int(page) + + start = page * items_per_page + 1 + + conn = pyes.ES('127.0.0.1:9200') + + # do fulltext search + q = pyes.StringQuery(query) + + try: + results = conn.search_raw(query=q, indices='aips', type='aip', start=start - 1, size=items_per_page) + except: + return HttpResponse('Error 
accessing index.') + + # augment result data + modifiedResults = [] + + for item in results.hits.hits: + clone = item._source.copy() + + # try to find AIP details in database + try: + aip = models.AIP.objects.get(sipuuid=clone['AIPUUID']) + clone['sipname'] = aip.sipname + clone['href'] = aip.filepath.replace(AIPSTOREPATH + '/', "AIPsStore/") + except: + aip = None + clone['sipname'] = False + + clone['filename'] = os.path.basename(clone['filePath']) + clone['document_id'] = item['_id'] + clone['document_id_no_hyphens'] = item['_id'].replace('-', '____') + + modifiedResults.append(clone) + + number_of_results = results.hits.total + + # use augmented result data + results = modifiedResults + + # limit end by total hits + end = start + items_per_page - 1 + if end > number_of_results: + end = number_of_results + + # determine the previous page, if any + previous_page = False + if page > 0: + previous_page = page - 1 + + # determine the next page, if any + next_page = False + if (items_per_page * (page + 1)) < number_of_results: + next_page = page + 1 + + # make sure results is set + try: + if results: + pass + except: + results = False + + form = forms.StorageSearchForm(initial={'query': query}) + return render(request, 'archival_storage/archival_storage_search.html', locals()) + +def archival_storage_indexed_count(index): + aip_indexed_file_count = 0 + try: + conn = pyes.ES('127.0.0.1:9200') + count_data = conn.count(indices=index) + aip_indexed_file_count = count_data.count + except: + pass + return aip_indexed_file_count + +def archival_storage_sip_download(request, path): + full_path = os.path.join(os.path.dirname(AIPSTOREPATH), path) + return filesystem.send_file(request, full_path) + +def archival_storage_sip_display(request, current_page_number=None): + form = forms.StorageSearchForm() + + total_size = 0 + + # get ElasticSearch stats + aip_indexed_file_count = archival_storage_indexed_count('aips') + + # get AIPs from DB + aips = models.AIP.objects.all() + + # 
handle pagination + p = Paginator(aips, 20) + current_page_number = 1 if current_page_number == None else int(current_page_number) + page = p.page(current_page_number) + has_next = page.has_next() + next_page = current_page_number + 1 + has_previous = page.has_previous() + previous_page = current_page_number - 1 + has_other_pages = page.has_other_pages() + + sips = [] + for aip in page.object_list: + sip = {} + sip['href'] = aip.filepath.replace(AIPSTOREPATH + '/', "AIPsStore/") + sip['name'] = aip.sipname + sip['uuid'] = aip.sipuuid + + sip['date'] = aip.sipdate + + try: + size = os.path.getsize(aip.filepath) / float(1024) / float(1024) + total_size = total_size + size + sip['size'] = '{0:.2f} MB'.format(size) + except: + sip['size'] = 'Removed' + + sips.append(sip) + + order_by = request.GET.get('order_by', 'name'); + sort_by = request.GET.get('sort_by', 'up'); + + def sort_aips(sip): + value = 0 + if 'name' == order_by: + value = sip['name'].lower() + else: + value = sip[order_by] + return value + sips = sorted(sips, key = sort_aips) + + if sort_by == 'down': + sips.reverse() + + total_size = '{0:.2f}'.format(total_size) + + return render(request, 'archival_storage/archival_storage.html', locals()) + +def archival_storage_file_json(request, document_id_modified): + document_id = document_id_modified.replace('____', '-') + conn = httplib.HTTPConnection("127.0.0.1:9200") + conn.request("GET", "/aips/aip/" + document_id) + response = conn.getresponse() + data = response.read() + pretty_json = simplejson.dumps(simplejson.loads(data), sort_keys=True, indent=2) + return HttpResponse(pretty_json, content_type='application/json') diff --git a/src/dashboard/src/components/decorators.py b/src/dashboard/src/components/decorators.py new file mode 100644 index 0000000000..ea3a08c423 --- /dev/null +++ b/src/dashboard/src/components/decorators.py @@ -0,0 +1,76 @@ +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. 
+# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . + +from django.db.models import Max +from django.conf import settings as django_settings +from django.core.exceptions import ObjectDoesNotExist +from django.core.urlresolvers import reverse +from django.core.paginator import Paginator, InvalidPage, EmptyPage +from django.db import connection, transaction +from django.forms.models import modelformset_factory, inlineformset_factory +from django.shortcuts import render_to_response, get_object_or_404, redirect, render +from django.http import Http404, HttpResponse, HttpResponseBadRequest, HttpResponseRedirect +from django.utils import simplejson +from django.utils.functional import wraps +from django.views.static import serve +from django.utils.functional import wraps +from django.template import RequestContext +from django.utils.dateformat import format +from contrib.mcp.client import MCPClient +from contrib import utils +from main import forms +from main import models +from main import filesystem +from lxml import etree +from lxml import objectify +import calendar +import cPickle +from datetime import datetime +import os +import re +import subprocess +import sys +sys.path.append("/usr/lib/archivematica/archivematicaCommon/externals") +import pyes +from django.contrib.auth.decorators import user_passes_test +import urllib + +# Try to update context instead of sending new params +def load_jobs(view): + 
@wraps(view) + def inner(request, uuid, *args, **kwargs): + jobs = models.Job.objects.filter(sipuuid=uuid) + if 0 == jobs.count: + raise Http404 + kwargs['jobs'] = jobs + kwargs['name'] = utils.get_directory_name(jobs[0]) + return view(request, uuid, *args, **kwargs) + return inner + +# Requires confirmation from a prompt page before executing a request +# (see http://djangosnippets.org/snippets/1922/) +def confirm_required(template_name, context_creator, key='__confirm__'): + def decorator(func): + def inner(request, *args, **kwargs): + if request.POST.has_key(key): + return func(request, *args, **kwargs) + else: + context = context_creator and context_creator(request, *args, **kwargs) \ + or RequestContext(request) + return render_to_response(template_name, context) + return wraps(func)(inner) + return decorator diff --git a/src/dashboard/src/components/preservation_planning/__init__.py b/src/dashboard/src/components/preservation_planning/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/dashboard/src/components/preservation_planning/urls.py b/src/dashboard/src/components/preservation_planning/urls.py new file mode 100644 index 0000000000..a55f9db1ce --- /dev/null +++ b/src/dashboard/src/components/preservation_planning/urls.py @@ -0,0 +1,5 @@ +from django.conf.urls.defaults import patterns + +urlpatterns = patterns('components.preservation_planning.views', + (r'$', 'preservation_planning') +) diff --git a/src/dashboard/src/components/preservation_planning/views.py b/src/dashboard/src/components/preservation_planning/views.py new file mode 100644 index 0000000000..8f8e43513c --- /dev/null +++ b/src/dashboard/src/components/preservation_planning/views.py @@ -0,0 +1,83 @@ +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. 
+# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . + +from django.db import connection, transaction +from django.shortcuts import render + +def preservation_planning(request): + query="""SELECT + Groups.description, + FIBE.Extension, + CC.classification, + CT.TYPE, + CR.countAttempts, + CR.countOK, + CR.countNotOK, + CR.countAttempts - (CR.countOK + CR.countNotOK) AS countIncomplete, + Commands.PK AS CommandPK, + Commands.description, + Commands.command + FROM FileIDsByExtension AS FIBE + RIGHT OUTER JOIN FileIDs ON FIBE.FileIDs = FileIDs.pk + LEFT OUTER JOIN FileIDGroupMembers AS FIGM ON FIGM.fileID = FileIDs.pk + LEFT OUTER JOIN Groups on Groups.pk = FIGM.groupID + JOIN CommandRelationships AS CR ON FileIDs.pk = CR.FileID + JOIN Commands ON CR.command = Commands.pk + JOIN CommandClassifications AS CC on CR.commandClassification = CC.pk + JOIN CommandTypes AS CT ON Commands.commandType = CT.pk + WHERE + FIBE.Extension IS NOT NULL + AND FIBE.Extension NOT IN ('mboxi', 'pst') + AND CC.classification IN ('access', 'preservation') + ORDER BY Groups.description, FIBE.Extension, CC.classification""" + + cursor = connection.cursor() + cursor.execute(query) + planning = cursor.fetchall() + + url = { + 'Audio': 'http://archivematica.org/wiki/index.php?title=Audio', + 'Email': 'http://archivematica.org/wiki/index.php?title=Email', + 'Office Open XML': 
'http://archivematica.org/wiki/index.php?title=Microsoft_Office_Open_XML', + 'Plain text': 'http://archivematica.org/wiki/index.php?title=Plain_text', + 'Portable Document Format': 'http://archivematica.org/wiki/index.php?title=Portable_Document_Format', + 'Presentation': 'http://archivematica.org/wiki/index.php?title=Presentation_files', + 'Raster Image': 'http://archivematica.org/wiki/index.php?title=Raster_images', + 'Raw Camera Image': 'http://archivematica.org/wiki/index.php?title=Raw_camera_files', + 'Spreadsheet': 'http://archivematica.org/wiki/index.php?title=Spreadsheets', + 'Vector Image': 'http://archivematica.org/wiki/index.php?title=Vector_images', + 'Video': 'http://archivematica.org/wiki/index.php?title=Video', + 'Word Processing': 'http://archivematica.org/wiki/index.php?title=Word_processing_files' + } + + file_types = [] + last_type = '' + for item in planning: + if last_type == item[0]: + row = file_types.pop() + else: + row = {} + row['type'] = last_type = item[0] # File type + if row['type'] in url: + row['url'] = url[row['type']] + row['extensions'] = [] + row['extensions'].append(item) # Extensions + file_types.append(row) + + cursor.close() + + return render(request, 'main/preservation_planning.html', locals()) diff --git a/src/dashboard/src/contrib/__init__.py b/src/dashboard/src/contrib/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/dashboard/src/contrib/mcp/__init__.py b/src/dashboard/src/contrib/mcp/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/dashboard/src/contrib/mcp/client.py b/src/dashboard/src/contrib/mcp/client.py new file mode 100644 index 0000000000..87c94c4656 --- /dev/null +++ b/src/dashboard/src/contrib/mcp/client.py @@ -0,0 +1,67 @@ +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. 
+# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . + +import gearman +import cPickle + +try: + import django.conf.settings as settings +except ImportError: + class Settings: + MCP_SERVER = ('localhost', 4730) + settings = Settings() + +class MCPClient: + + def __init__(self, host=settings.MCP_SERVER[0], port=settings.MCP_SERVER[1]): + self.server = "%s:%d" % (host, port) + + def execute(self, uuid, choice): + gm_client = gearman.GearmanClient([self.server]) + data = {} + data["jobUUID"] = uuid + data["chain"] = choice + completed_job_request = gm_client.submit_job("approveJob", cPickle.dumps(data), None) + #self.check_request_status(completed_job_request) + return + + def list(self): + gm_client = gearman.GearmanClient([self.server]) + completed_job_request = gm_client.submit_job("getJobsAwaitingApproval", "", None) + #self.check_request_status(completed_job_request) + return cPickle.loads(completed_job_request.result) + + def notifications(self): + gm_client = gearman.GearmanClient([self.server]) + completed_job_request = gm_client.submit_job("getNotifications", "", None) + #self.check_request_status(completed_job_request) + return cPickle.loads(completed_job_request.result) + + def check_request_status(self, job_request): + if job_request.complete: + self.results = cPickle.loads(job_request.result) + print "Task %s finished! 
Result: %s - %s" % (job_request.job.unique, job_request.state, self.results) + elif job_request.timed_out: + print >>sys.stderr, "Task %s timed out!" % job_request.unique + elif job_request.state == JOB_UNKNOWN: + print >>sys.stderr, "Task %s connection failed!" % job_request.unique + else: + print >>sys.stderr, "Task %s failed!" % job_request.unique + +if __name__ == '__main__': + mcpClient = MCPClient() + print mcpClient.list() diff --git a/src/dashboard/src/contrib/utils.py b/src/dashboard/src/contrib/utils.py new file mode 100644 index 0000000000..d45c858e8b --- /dev/null +++ b/src/dashboard/src/contrib/utils.py @@ -0,0 +1,51 @@ +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . 
+
+import os
+
+def get_directory_size(path='.'):
+    total_size = 0
+    for dirpath, dirnames, filenames in os.walk(path):
+        for f in filenames:
+            fp = os.path.join(dirpath, f)
+            total_size += os.path.getsize(fp)
+    return total_size
+
+def get_directory_name(job):
+    """
+    Expected format:
+    %sharedPath%watchedDirectories/workFlowDecisions/createDip/ImagesSIP-69826e50-87a2-4370-b7bd-406fc8aad94f/
+    """
+    import re
+
+    directory = job.directory
+    uuid = job.sipuuid
+
+    try:
+        return re.search(r'^.*/(?P<directory>.*)-[\w]{8}(-[\w]{4}){3}-[\w]{12}[/]{0,1}$', directory).group('directory')
+    except:
+        pass
+
+    try:
+        return re.search(r'^.*/(?P<directory>.*)/$', directory).group('directory')
+    except:
+        pass
+
+    if directory:
+        return directory
+    else:
+        return uuid
diff --git a/src/dashboard/src/installer/__init__.py b/src/dashboard/src/installer/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/src/dashboard/src/installer/forms.py b/src/dashboard/src/installer/forms.py
new file mode 100644
index 0000000000..b93fac8bd5
--- /dev/null
+++ b/src/dashboard/src/installer/forms.py
@@ -0,0 +1,38 @@
+# This file is part of Archivematica.
+#
+# Copyright 2010-2012 Artefactual Systems Inc.
+#
+# Archivematica is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Archivematica is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Archivematica. If not, see <http://www.gnu.org/licenses/>.
+ +from django import forms +from django.contrib.auth.models import User +from django.contrib.auth.forms import UserCreationForm + +class SuperUserCreationForm(UserCreationForm): + email = forms.EmailField(required=True) + + class Meta: + model = User + fields = ("username", "email", "password1", "password2") + + def save(self, commit=True): + user = super(UserCreationForm, self).save(commit=False) + user.set_password(self.cleaned_data["password1"]) + user.email = self.cleaned_data["email"] + user.is_staff = True + user.is_active = True + user.is_superuser = True + if commit: + user.save() + return user diff --git a/src/dashboard/src/installer/middleware.py b/src/dashboard/src/installer/middleware.py new file mode 100644 index 0000000000..fd3f6e506b --- /dev/null +++ b/src/dashboard/src/installer/middleware.py @@ -0,0 +1,38 @@ +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . 
+ +from django.conf import settings +from django.contrib.auth.models import User +from django.core.urlresolvers import reverse +from django.http import HttpResponseRedirect, HttpResponse + +from re import compile + +EXEMPT_URLS = [compile(settings.LOGIN_URL.lstrip('/'))] +if hasattr(settings, 'LOGIN_EXEMPT_URLS'): + EXEMPT_URLS += [compile(expr) for expr in settings.LOGIN_EXEMPT_URLS] + +class ConfigurationCheckMiddleware: + def process_request(self, request): + if User.objects.count() == 0: + if reverse('installer.views.welcome') != request.path_info: + return HttpResponseRedirect(reverse('installer.views.welcome')) + else: + if not request.user.is_authenticated(): + path = request.path_info.lstrip('/') + if not any(m.match(path) for m in EXEMPT_URLS): + return HttpResponseRedirect(settings.LOGIN_URL) diff --git a/src/dashboard/src/installer/urls.py b/src/dashboard/src/installer/urls.py new file mode 100644 index 0000000000..3920979bc7 --- /dev/null +++ b/src/dashboard/src/installer/urls.py @@ -0,0 +1,22 @@ +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . 
+ +from django.conf.urls.defaults import patterns + +urlpatterns = patterns('installer.views', + (r'welcome/$', 'welcome'), +) diff --git a/src/dashboard/src/installer/views.py b/src/dashboard/src/installer/views.py new file mode 100644 index 0000000000..2c5cb122e8 --- /dev/null +++ b/src/dashboard/src/installer/views.py @@ -0,0 +1,44 @@ +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . 
+ +from django.contrib.auth import authenticate, login +from django.contrib.auth.models import User +from django.core.urlresolvers import reverse +from django.shortcuts import render +from django.http import HttpResponse, HttpResponseRedirect + +from installer.forms import SuperUserCreationForm + +def welcome(request): + # This form will be only accessible when the database has no users + if 0 < User.objects.count(): + return HttpResponseRedirect(reverse('main.views.home')) + # Form + if request.method == 'POST': + form = SuperUserCreationForm(request.POST) + if form.is_valid(): + user = form.save() + user = authenticate(username=user.username, password=form.cleaned_data['password1']) + if user is not None: + login(request, user) + return HttpResponseRedirect(reverse('main.views.home')) + else: + form = SuperUserCreationForm() + + return render(request, 'installer/welcome.html', { + 'form': form, + }) diff --git a/src/dashboard/src/main/__init__.py b/src/dashboard/src/main/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/dashboard/src/main/filesystem.py b/src/dashboard/src/main/filesystem.py new file mode 100644 index 0000000000..bdf73731db --- /dev/null +++ b/src/dashboard/src/main/filesystem.py @@ -0,0 +1,466 @@ +from django.http import Http404, HttpResponse, HttpResponseBadRequest, HttpResponseRedirect +from django.db import connection +from django.utils import simplejson +import os +import shutil +import MySQLdb +import tempfile +from django.core.servers.basehttp import FileWrapper + +import sys +import uuid +import mimetypes +sys.path.append("/usr/lib/archivematica/archivematicaCommon") +import databaseInterface +import databaseFunctions +from archivematicaCreateStructuredDirectory import createStructuredDirectory + +# for unciode sorting support +import locale +locale.setlocale(locale.LC_ALL, '') + +SHARED_DIRECTORY_ROOT = '/var/archivematica/sharedDirectory' +ORIGINALS_DIR = SHARED_DIRECTORY_ROOT + '/transferBackups/originals' 
+ACTIVE_TRANSFER_DIR = SHARED_DIRECTORY_ROOT + '/watchedDirectories/activeTransfers' +STANDARD_TRANSFER_DIR = ACTIVE_TRANSFER_DIR + '/standardTransfer' +COMPLETED_TRANSFERS_DIR = SHARED_DIRECTORY_ROOT + '/watchedDirectories/SIPCreation/completedTransfers' + +def sorted_directory_list(path): + return sorted(os.listdir(path), cmp=locale.strcoll) + +def directory_to_dict(path, directory={}, entry=False): + # if starting traversal, set entry to directory root + if (entry == False): + entry = directory + # remove leading slash + entry['parent'] = os.path.dirname(path)[1:] + + # set standard entry properties + entry['name'] = os.path.basename(path) + entry['children'] = [] + + # define entries + entries = sorted_directory_list(path) + for file in entries: + new_entry = None + if file[0] != '.': + new_entry = {} + new_entry['name'] = file + entry['children'].append(new_entry) + + # if entry is a directory, recurse + child_path = os.path.join(path, file) + if new_entry != None and os.path.isdir(child_path) and os.access(child_path, os.R_OK): + directory_to_dict(child_path, directory, new_entry) + + # return fully traversed data + return directory + +def directory_children(request, basePath=False): + path = '' + if (basePath): + path = path + basePath + path = path + request.GET.get('base_path', '') + path = path + request.GET.get('path', '') + + response = {} + entries = [] + directories = [] + + for entry in sorted_directory_list(path): + if entry[0] != '.': + entries.append(entry) + entry_path = os.path.join(path, entry) + if os.path.isdir(entry_path) and os.access(entry_path, os.R_OK): + directories.append(entry) + + response = { + 'entries': entries, + 'directories': directories + } + + return HttpResponse( + simplejson.JSONEncoder().encode(response), + mimetype='application/json' + ) + +def directory_contents(path, contents=[]): + entries = sorted_directory_list(path) + for entry in entries: + contents.append(os.path.join(path, entry)) + entry_path = 
os.path.join(path, entry) + if os.path.isdir(entry_path) and os.access(entry_path, os.R_OK): + directory_contents(entry_path, contents) + return contents + +def contents(request): + path = request.GET.get('path', '/home') + response = directory_to_dict(path) + return HttpResponse( + simplejson.JSONEncoder().encode(response), + mimetype='application/json' + ) + +def delete(request): + filepath = request.POST.get('filepath', '') + filepath = os.path.join('/', filepath) + error = check_filepath_exists(filepath) + + if error == None: + filepath = os.path.join(filepath) + if os.path.isdir(filepath): + try: + shutil.rmtree(filepath) + except: + error = 'Error attempting to delete directory.' + else: + os.remove(filepath) + + response = {} + + if error != None: + response['message'] = error + response['error'] = True + else: + response['message'] = 'Delete successful.' + + return HttpResponse( + simplejson.JSONEncoder().encode(response), + mimetype='application/json' + ) + +def get_temp_directory(request): + temp_dir = tempfile.mkdtemp() + + response = {} + response['tempDir'] = temp_dir + + return HttpResponse( + simplejson.JSONEncoder().encode(response), + mimetype='application/json' + ) + +def copy_transfer_component(request): + transfer_name = request.POST.get('name', '') + path = request.POST.get('path', '') + destination = request.POST.get('destination', '') + + error = None + + if transfer_name == '': + error = 'No transfer name provided.' + else: + if path == '': + error = 'No path provided.' 
+        else:
+            # if transfer component path leads to a ZIP file, treat as zipped
+            # bag
+            try:
+                path.lower().index('.zip')
+                shutil.copy(path, destination)
+                paths_copied = 1
+            except:
+                transfer_dir = os.path.join(destination, transfer_name)
+
+                # Create directory before it is used, otherwise shutil.copy()
+                # would use that location to store a file
+                if not os.path.isdir(transfer_dir):
+                    os.mkdir(transfer_dir)
+
+                paths_copied = 0
+
+                # cycle through each path copying files/dirs inside it to transfer dir
+                for entry in sorted_directory_list(path):
+                    entry_path = os.path.join(path, entry)
+                    if os.path.isdir(entry_path):
+                        destination_dir = os.path.join(transfer_dir, entry)
+                        try:
+                            shutil.copytree(
+                                entry_path,
+                                destination_dir
+                            )
+                        except:
+                            error = 'Error copying from ' + entry_path + ' to ' + destination_dir + '. (' + str(sys.exc_info()[0]) + ')'
+                    else:
+                        shutil.copy(entry_path, transfer_dir)
+
+                    paths_copied = paths_copied + 1
+
+    response = {}
+
+    if error != None:
+        response['message'] = error
+        response['error'] = True
+    else:
+        response['message'] = 'Copied ' + str(paths_copied) + ' entries.'
+ + return HttpResponse( + simplejson.JSONEncoder().encode(response), + mimetype='application/json' + ) + +def copy_to_originals(request): + filepath = request.POST.get('filepath', '') + error = check_filepath_exists('/' + filepath) + + if error == None: + processingDirectory = '/var/archivematica/sharedDirectory/currentlyProcessing/' + sipName = os.path.basename(filepath) + #autoProcessSIPDirectory = ORIGINALS_DIR + autoProcessSIPDirectory = '/var/archivematica/sharedDirectory/watchedDirectories/SIPCreation/SIPsUnderConstruction/' + tmpSIPDir = os.path.join(processingDirectory, sipName) + "/" + destSIPDir = os.path.join(autoProcessSIPDirectory, sipName) + "/" + + sipUUID = uuid.uuid4().__str__() + + createStructuredDirectory(tmpSIPDir) + databaseFunctions.createSIP(destSIPDir.replace('/var/archivematica/sharedDirectory/', '%sharedPath%'), sipUUID) + + objectsDirectory = os.path.join('/', filepath, 'objects') + + #move the objects to the SIPDir + for item in os.listdir(objectsDirectory): + shutil.move(os.path.join(objectsDirectory, item), os.path.join(tmpSIPDir, "objects", item)) + + #moveSIPTo autoProcessSIPDirectory + shutil.move(tmpSIPDir, destSIPDir) + + """ + # confine destination to subdir of originals + filepath = os.path.join('/', filepath) + destination = os.path.join(ORIGINALS_DIR, os.path.basename(filepath)) + destination = pad_destination_filepath_if_it_already_exists(destination) + #error = 'Copying from ' + filepath + ' to ' + destination + '.' + try: + shutil.copytree( + filepath, + destination + ) + except: + error = 'Error copying from ' + filepath + ' to ' + destination + '.' + """ + + response = {} + + if error != None: + response['message'] = error + response['error'] = True + else: + response['message'] = 'Copy successful.' 
+
+    return HttpResponse(
+        simplejson.JSONEncoder().encode(response),
+        mimetype='application/json'
+    )
+
+def copy_to_start_transfer(request):
+    filepath = request.POST.get('filepath', '')
+    type = request.POST.get('type', '')
+
+    error = check_filepath_exists('/' + filepath)
+
+    if error == None:
+        # confine destination to subdir of originals
+        filepath = os.path.join('/', filepath)
+        basename = os.path.basename(filepath)
+
+        # default to standard transfer
+        type_paths = {
+            'standard': 'standardTransfer',
+            'unzipped bag': 'baggitDirectory',
+            'zipped bag': 'baggitZippedDirectory',
+            'dspace': 'Dspace',
+            'maildir': 'maildir'
+        }
+
+        try:
+            type_subdir = type_paths[type]
+            destination = os.path.join(ACTIVE_TRANSFER_DIR, type_subdir)
+        except KeyError:
+            destination = os.path.join(STANDARD_TRANSFER_DIR)
+
+        # if transfer component path leads to a ZIP file, treat as zipped
+        # bag
+        try:
+            filepath.lower().index('.zip')
+
+            shutil.copy(filepath, destination)
+        except:
+            destination = os.path.join(destination, basename)
+
+            destination = pad_destination_filepath_if_it_already_exists(destination)
+
+            try:
+                shutil.copytree(
+                    filepath,
+                    destination
+                )
+            except:
+                error = 'Error copying from ' + filepath + ' to ' + destination + '. (' + str(sys.exc_info()[0]) + ')'
+
+    response = {}
+
+    if error != None:
+        response['message'] = error
+        response['error'] = True
+    else:
+        response['message'] = 'Copy successful.'
+ + return HttpResponse( + simplejson.JSONEncoder().encode(response), + mimetype='application/json' + ) + +def copy_from_arrange_to_completed(request): + return copy_to_originals(request) + """ + sourcepath = request.POST.get('filepath', '') + + error = check_filepath_exists('/' + sourcepath) + + if error == None: + sourcepath = os.path.join('/', sourcepath) + destination = os.path.join(COMPLETED_TRANSFERS_DIR, os.path.basename(sourcepath)) + + # do check if directory already exists + if os.path.exists(destination): + error = 'A transfer with this directory name has already been started.' + else: + try: + shutil.copytree( + sourcepath, + destination + ) + except: + error = 'Error copying from ' + filepath + ' to ' + destination + '.' + + response = {} + + if error != None: + response['message'] = error + response['error'] = True + else: + response['message'] = 'Transfer started.' + + return HttpResponse( + simplejson.JSONEncoder().encode(response), + mimetype='application/json' + ) + """ + +def copy_to_arrange(request): + sourcepath = request.POST.get('filepath', '') + destination = request.POST.get('destination', '') + + error = check_filepath_exists('/' + sourcepath) + + if error == None: + # use lookup path to cleanly find UUID + lookup_path = '%sharedPath%' + sourcepath[SHARED_DIRECTORY_ROOT.__len__():sourcepath.__len__()] + '/' + cursor = connection.cursor() + query = 'SELECT unitUUID FROM transfersAndSIPs WHERE currentLocation=%s LIMIT 1' + cursor.execute(query, (lookup_path, )) + possible_uuid_data = cursor.fetchone() + + if possible_uuid_data: + uuid = possible_uuid_data[0] + + # remove UUID from destination directory name + modified_basename = os.path.basename(sourcepath).replace('-' + uuid, '') + else: + modified_basename = os.path.basename(sourcepath) + + # confine destination to subdir of originals + sourcepath = os.path.join('/', sourcepath) + destination = os.path.join('/', destination) + '/' + modified_basename + # do a check making sure destination 
is a subdir of ARRANGE_DIR + destination = pad_destination_filepath_if_it_already_exists(destination) + + if os.path.isdir(sourcepath): + try: + shutil.copytree( + sourcepath, + destination + ) + except: + error = 'Error copying from ' + sourcepath + ' to ' + destination + '.' + + if error == None: + # remove any metadata and logs folders + for path in directory_contents(destination): + basename = os.path.basename(path) + if basename == 'metadata' or basename == 'logs': + if os.path.isdir(path): + shutil.rmtree(path) + else: + shutil.copy(sourcepath, destination) + + response = {} + + if error != None: + response['message'] = error + response['error'] = True + else: + response['message'] = 'Copy successful.' + + return HttpResponse( + simplejson.JSONEncoder().encode(response), + mimetype='application/json' + ) + +def check_filepath_exists(filepath): + error = None + if filepath == '': + error = 'No filepath provided.' + + # check if exists + if error == None and not os.path.exists(filepath): + error = 'Filepath ' + filepath + ' does not exist.' + + # check if is file or directory + + # check for trickery + try: + filepath.index('..') + error = 'Illegal path.' + except: + pass + + return error + +def pad_destination_filepath_if_it_already_exists(filepath, original=None, attempt=0): + if original == None: + original = filepath + attempt = attempt + 1 + if os.path.exists(filepath): + return pad_destination_filepath_if_it_already_exists(original + '_' + str(attempt), original, attempt) + return filepath + +def download(request): + return send_file(request, '/' + request.GET.get('filepath', '')) + +def send_file(request, filepath): + """ + Send a file through Django without loading the whole file into + memory at once. The FileWrapper will turn the file object into an + iterator for chunks of 8KB. 
+ """ + filename = os.path.basename(filepath) + extension = os.path.splitext(filepath)[1].lower() + + wrapper = FileWrapper(file(filepath)) + response = HttpResponse(wrapper) + + # force download for certain filetypes + extensions_to_download = ['.7z', '.zip'] + + try: + index = extensions_to_download.index(extension) + response['Content-Type'] = 'application/force-download' + response['Content-Disposition'] = 'attachment; filename="' + filename + '"' + except: + mimetype = mimetypes.guess_type(filename)[0] + response['Content-type'] = mimetype + + response['Content-Length'] = os.path.getsize(filepath) + return response diff --git a/src/dashboard/src/main/forms.py b/src/dashboard/src/main/forms.py new file mode 100644 index 0000000000..3834395f3c --- /dev/null +++ b/src/dashboard/src/main/forms.py @@ -0,0 +1,140 @@ +from django import forms +from django.forms import ModelForm +from django.forms.models import modelformset_factory +from django.forms.widgets import TextInput, Textarea +from main import models + +TEXTAREA_ATTRS = {'rows': '4', 'class': 'span11'} +INPUT_ATTRS = {'class': 'span11'} + +class DublinCoreMetadataForm(forms.Form): + title = forms.CharField(required=False, widget=TextInput(attrs=INPUT_ATTRS)) + creator = forms.CharField(required=False, widget=TextInput(attrs=INPUT_ATTRS)) + subject = forms.CharField(required=False, widget=TextInput(attrs=INPUT_ATTRS)) + description = forms.CharField(required=False, widget=Textarea(attrs=TEXTAREA_ATTRS)) + publisher = forms.CharField(required=False, widget=TextInput(attrs=INPUT_ATTRS)) + contributor = forms.CharField(required=False, widget=TextInput(attrs=INPUT_ATTRS)) + date = forms.CharField(required=False, help_text='Use ISO 8061 (YYYY-MM-DD or YYYY-MM-DD/YYYY-MM-DD)', widget=TextInput(attrs=INPUT_ATTRS)) + type = forms.CharField(required=False, widget=TextInput(attrs=INPUT_ATTRS)) + format = forms.CharField(required=False, widget=TextInput(attrs=INPUT_ATTRS)) + identifier = forms.CharField(required=False, 
widget=TextInput(attrs=INPUT_ATTRS)) + source = forms.CharField(required=False, widget=TextInput(attrs=INPUT_ATTRS)) + relation = forms.CharField(required=False, label='Relation', widget=TextInput(attrs=INPUT_ATTRS)) + language = forms.CharField(required=False, help_text='Use ISO 3166', widget=TextInput(attrs=INPUT_ATTRS)) + coverage = forms.CharField(required=False, widget=TextInput(attrs=INPUT_ATTRS)) + rights = forms.CharField(required=False, widget=Textarea(attrs=TEXTAREA_ATTRS)) + +class AdministrationForm(forms.Form): + arguments = forms.CharField(required=False, widget=Textarea(attrs=TEXTAREA_ATTRS)) + +class RightsForm(ModelForm): + rightsbasis = forms.ChoiceField(label="Basis", choices=( + ('Copyright', 'Copyright'), + ('Statute', 'Statute'), + ('License', 'License'), + ('Donor', 'Donor'), + ('Policy', 'Policy'), + ('Other', 'Other') + )) + + class Meta: + model = models.RightsStatement + exclude = ( + 'id', + 'metadataappliestotype', + 'metadataappliestoidentifier', + 'rightsstatementidentifiertype', + 'rightsstatementidentifiervalue', + 'rightsholder',) + widgets = { + 'rightsnotes': Textarea(attrs=TEXTAREA_ATTRS), + 'rightsholder': TextInput(attrs=INPUT_ATTRS), } + +class RightsGrantedForm(ModelForm): + class Meta: + model = models.RightsStatementRightsGranted + widgets = { + 'act': TextInput(attrs=INPUT_ATTRS), + 'restriction': TextInput(attrs=INPUT_ATTRS), + 'startdate': TextInput(attrs=INPUT_ATTRS), + 'enddate': TextInput(attrs=INPUT_ATTRS), } + +class RightsGrantedNotesForm(ModelForm): + class Meta: + model = models.RightsStatementRightsGrantedNote + widgets = { + 'rightsgranted': TextInput(attrs=TEXTAREA_ATTRS), } + +class RightsCopyrightForm(ModelForm): + class Meta: + model = models.RightsStatementCopyright + widgets = { + 'copyrightstatus': TextInput(attrs=INPUT_ATTRS), + 'copyrightjurisdiction': TextInput(attrs=INPUT_ATTRS), + 'copyrightstatusdeterminationdate': TextInput(attrs=INPUT_ATTRS), + 'copyrightapplicablestartdate': 
TextInput(attrs=INPUT_ATTRS), + 'copyrightapplicableenddate': TextInput(attrs=INPUT_ATTRS), } + +class RightsStatementCopyrightDocumentationIdentifierForm(ModelForm): + class Meta: + model = models.RightsStatementCopyrightDocumentationIdentifier + widgets = { + 'copyrightdocumentationidentifiertype': TextInput(attrs=INPUT_ATTRS), + 'copyrightdocumentationidentifiervalue': TextInput(attrs=INPUT_ATTRS), + 'copyrightdocumentationidentifierrole': TextInput(attrs=INPUT_ATTRS), } + +class RightsCopyrightNoteForm(ModelForm): + class Meta: + model = models.RightsStatementCopyrightNote + widgets = { + 'copyrightnote': Textarea(attrs=TEXTAREA_ATTRS), } + +class RightsStatuteForm(ModelForm): + class Meta: + model = models.RightsStatementStatuteInformation + widgets = { + 'statutejurisdiction': TextInput(attrs=INPUT_ATTRS), + 'statutecitation': TextInput(attrs=INPUT_ATTRS), + 'statutedeterminationdate': TextInput(attrs=INPUT_ATTRS), + 'statuteapplicablestartdate': TextInput(attrs=INPUT_ATTRS), + 'statuteapplicableenddate': TextInput(attrs=INPUT_ATTRS), } + +class RightsStatuteNoteForm(ModelForm): + class Meta: + model = models.RightsStatementStatuteInformationNote + widgets = { + 'statutenote': Textarea(attrs=TEXTAREA_ATTRS), } + +class RightsOtherRightsForm(ModelForm): + class Meta: + model = models.RightsStatementOtherRightsInformation + widgets = { + 'otherrightsbasis': TextInput(attrs=INPUT_ATTRS), + 'otherrightsapplicablestartdate': TextInput(attrs=INPUT_ATTRS), + 'otherrightsapplicableenddate': TextInput(attrs=INPUT_ATTRS), } + +class RightsLicenseForm(ModelForm): + class Meta: + model = models.RightsStatementLicense + widgets = { + 'licensetype': TextInput(attrs=INPUT_ATTRS), + 'licensevalue': TextInput(attrs=INPUT_ATTRS), + 'licenseterms': TextInput(attrs=INPUT_ATTRS), + 'licenseapplicablestartdate': TextInput(attrs=INPUT_ATTRS), + 'licenseapplicableenddate': TextInput(attrs=INPUT_ATTRS), } + +class RightsLicenseNoteForm(ModelForm): + class Meta: + model = 
models.RightsStatementLicenseNote + widgets = { + 'licensenote': Textarea(attrs=TEXTAREA_ATTRS), } + +class MicroServiceChoiceReplacementDicForm(ModelForm): + class Meta: + model = models.MicroServiceChoiceReplacementDic + exclude = ( + 'id', + 'choiceavailableatlink',) + widgets = { + 'description': TextInput(attrs=INPUT_ATTRS), + 'replacementdic': Textarea(attrs=TEXTAREA_ATTRS), } diff --git a/src/dashboard/src/main/models.py b/src/dashboard/src/main/models.py new file mode 100644 index 0000000000..5427be0d83 --- /dev/null +++ b/src/dashboard/src/main/models.py @@ -0,0 +1,443 @@ +# This Django model module was auto-generated and then updated manually +# Needs some cleanups, make sure each model has its primary_key=True +# Feel free to rename the models, but don't rename db_table values or field names. + +from django.db import models +from contrib import utils +from django import forms +import ast +import main + +class Access(models.Model): + id = models.AutoField(primary_key=True, db_column='pk') + sipuuid = models.CharField(max_length=150, db_column='SIPUUID', blank=True) + # Qubit ID (slug) generated or preexisting if a new description was not created + resource = models.TextField(db_column='resource', blank=True) + # Before the UploadDIP micro-service is executed, a dialog shows up and ask the user + # the target archival description when the DIP will be deposited via SWORD + # This column is mandatory, the user won't be able to submit the form if this field is empty + target = models.TextField(db_column='target', blank=True) + # Human readable status of an upload (rsync progress percentage, etc) + status = models.TextField(db_column='status', blank=True) + # Machine readable status code of an upload + # 10 = Rsync is working + # 11 = Rsync finished successfully + # 12 = Rsync failed (then see self.exitcode to get rsync exit code) + # 13 = SWORD deposit will be executed + # 14 = Deposit done, Qubit returned code 200 (HTTP Created) + # - The deposited was 
created synchronously + # - At this point self.resource should contain the created Qubit resource + # 15 = Deposit done, Qubit returned code 201 (HTTP Accepted) + # - The deposited will be created asynchronously (Qubit has a job queue) + # - At this point self.resource should contain the created Qubit resource + # - ^ this resource could be under progress, ask Qubit for the status + statuscode = models.IntegerField(null=True, db_column='statusCode', blank=True) + # Rsync exit code + exitcode = models.IntegerField(null=True, db_column='exitCode', blank=True) + # Timestamps + createdtime = models.DateTimeField(db_column='createdTime', auto_now_add=True) + updatedtime = models.DateTimeField(db_column='updatedTime', auto_now=True) + + class Meta: + db_table = u'Accesses' + + def get_title(self): + try: + jobs = main.models.Job.objects.filter(sipuuid=self.sipuuid) + return utils.get_directory_name(jobs[0]) + except: + return 'N/A' + +class DublinCoreManager(models.Manager): + + def get_sip_metadata(self, uuid): + return DublinCore.objects.get(metadataappliestotype__exact=1, metadataappliestoidentifier__exact=uuid) + +class DublinCore(models.Model): + id = models.AutoField(primary_key=True, db_column='pk') + metadataappliestotype = models.IntegerField(db_column='metadataAppliesToType') + metadataappliestoidentifier = models.CharField(max_length=50, blank=True, db_column='metadataAppliesToidentifier') + title = models.TextField(db_column='title', blank=True) + creator = models.TextField(db_column='creator', blank=True) + subject = models.TextField(db_column='subject', blank=True) + description = models.TextField(db_column='description', blank=True) + publisher = models.TextField(db_column='publisher', blank=True) + contributor = models.TextField(db_column='contributor', blank=True) + date = models.TextField(db_column='date', blank=True) + type = models.TextField(db_column='type', blank=True) + format = models.TextField(db_column='format', blank=True) + identifier =
models.TextField(db_column='identifier', blank=True) + source = models.TextField(db_column='source', blank=True) + relation = models.TextField(db_column='relation', blank=True) + language = models.TextField(db_column='language', blank=True) + coverage = models.TextField(db_column='coverage', blank=True) + rights = models.TextField(db_column='rights', blank=True) + + objects = DublinCoreManager() + + class Meta: + db_table = u'Dublincore' + + def __unicode__(self): + if self.title: + return u'%s' % self.title + else: + return u'Untitled' + +class Job(models.Model): + jobuuid = models.CharField(max_length=150, primary_key=True, db_column='jobUUID') + jobtype = models.CharField(max_length=750, db_column='jobType', blank=True) + createdtime = models.DateTimeField(db_column='createdTime') + createdtimedec = models.DecimalField(null=True, db_column='createdTimeDec', blank=True, max_digits=24, decimal_places=10) + directory = models.CharField(max_length=750, blank=True) + sipuuid = models.CharField(max_length=150, db_column='SIPUUID', blank=True) + unittype = models.CharField(max_length=150, db_column='unitType', blank=True) + currentstep = models.CharField(max_length=150, db_column='currentStep', blank=True) + microservicegroup = models.CharField(max_length=150, db_column='microserviceGroup', blank=True) + subjobof = models.CharField(max_length=50, db_column='subJobOf', blank=True) + hidden = models.BooleanField(default=False, blank=False) + + class Meta: + db_table = u'Jobs' + +class SIPManager(models.Manager): + + def is_hidden(self, uuid): + try: + return SIP.objects.get(uuid__exact=uuid).hidden is True + except: + return False + +class SIP(models.Model): + uuid = models.CharField(max_length=150, primary_key=True, db_column='sipUUID') + createdtime = models.DateTimeField(db_column='createdTime') + currentpath = models.TextField(db_column='currentPath', blank=True) + # ... 
+ hidden = models.BooleanField(default=False, blank=False) + + objects = SIPManager() + + class Meta: + db_table = u'SIPs' + +class AIP(models.Model): + sipuuid = models.CharField(max_length=150, primary_key=True, db_column='sipUUID') + sipname = models.CharField(max_length=150, primary_key=True, db_column='sipName') + sipdate = models.DateTimeField(db_column='sipDate') + createdtime = models.DateTimeField(db_column='createdTime') + filepath = models.TextField(db_column='filePath', blank=True) + + class Meta: + db_table = u'AIPs' + +class TransferManager(models.Manager): + + def is_hidden(self, uuid): + try: + return Transfer.objects.get(uuid__exact=uuid).hidden is True + except: + return False + +class Transfer(models.Model): + uuid = models.CharField(max_length=150, primary_key=True, db_column='transferUUID') + # ... + hidden = models.BooleanField(default=False, blank=False) + + objects = TransferManager() + + class Meta: + db_table = u'Transfers' + +class File(models.Model): + uuid = models.CharField(max_length=150, primary_key=True, db_column='fileUUID') + sip = models.ForeignKey(SIP, db_column='sipUUID', to_field = 'uuid') + transfer = models.ForeignKey(Transfer, db_column='transferUUID', to_field = 'uuid') + + class Meta: + db_table = u'Files' + +class Task(models.Model): + taskuuid = models.CharField(max_length=50, primary_key=True, db_column='taskUUID') + job = models.ForeignKey(Job, db_column='jobuuid', to_field = 'jobuuid') + createdtime = models.DateTimeField(db_column='createdTime') + fileuuid = models.CharField(max_length=50, db_column='fileUUID', blank=True) + filename = models.CharField(max_length=100, db_column='fileName', blank=True) + execution = models.CharField(max_length=50, db_column='exec', blank=True) + arguments = models.CharField(max_length=1000, blank=True) + starttime = models.DateTimeField(db_column='startTime') + client = models.CharField(max_length=50, blank=True) + endtime = models.DateTimeField(db_column='endTime') + stdout = 
models.TextField(db_column='stdOut', blank=True) + stderror = models.TextField(db_column='stdError', blank=True) + exitcode = models.IntegerField(null=True, db_column='exitCode', blank=True) + + class Meta: + db_table = u'Tasks' + +class JobStepCompleted(models.Model): + id = models.AutoField(primary_key=True, db_column='pk') + # jobuuid = models.CharField(max_length=50, db_column='jobUUID', blank=True) + job = models.ForeignKey(Job, db_column='jobuuid', to_field = 'jobuuid') + completedtime = models.DateTimeField(db_column='completedTime') + step = models.CharField(max_length=50, blank=True) + + class Meta: + db_table = u'jobStepCompleted' + +class RightsStatement(models.Model): + id = models.AutoField(primary_key=True, db_column='pk') + metadataappliestotype = models.IntegerField(Job, db_column='metadataAppliesToType') + metadataappliestoidentifier = models.CharField(max_length=50, blank=True, db_column='metadataAppliesToidentifier') + rightsstatementidentifiertype = models.TextField(db_column='rightsStatementIdentifierType', blank=True, verbose_name='Type') + rightsstatementidentifiervalue = models.TextField(db_column='rightsStatementIdentifierValue', blank=True, verbose_name='Value') + #rightsholder = models.TextField(db_column='fkAgent', blank=True, verbose_name='Rights holder') + rightsbasis = models.TextField(db_column='rightsBasis', verbose_name='Basis', blank=True) + + class Meta: + db_table = u'RightsStatement' + +class RightsStatementCopyright(models.Model): + id = models.AutoField(primary_key=True, db_column='pk', editable=False) + rightsstatement = models.ForeignKey(RightsStatement, db_column='fkRightsStatement') + copyrightstatus = models.TextField(db_column='copyrightStatus', blank=True, verbose_name='Copyright status') + copyrightjurisdiction = models.TextField(db_column='copyrightJurisdiction', blank=True, verbose_name='Copyright jurisdiction') + copyrightstatusdeterminationdate = models.TextField(db_column='copyrightStatusDeterminationDate', 
blank=True, verbose_name='Copyright determination date', help_text='Use ISO 8061 (YYYY-MM-DD)') + copyrightapplicablestartdate = models.TextField(db_column='copyrightApplicableStartDate', blank=True, verbose_name='Copyright start date', help_text='Use ISO 8061 (YYYY-MM-DD)') + copyrightapplicableenddate = models.TextField(db_column='copyrightApplicableEndDate', blank=True, verbose_name='Copyright end date', help_text='Use ISO 8061 (YYYY-MM-DD)') + copyrightenddateopen = models.BooleanField(db_column='copyrightApplicableEndDateOpen', verbose_name='Open End Date', help_text='Indicate end date is open') + + class Meta: + db_table = u'RightsStatementCopyright' + +class RightsStatementCopyrightDocumentationIdentifier(models.Model): + id = models.AutoField(primary_key=True, db_column='pk', editable=False) + rightscopyright = models.ForeignKey(RightsStatementCopyright, db_column='fkRightsStatementCopyrightInformation') + copyrightdocumentationidentifiertype = models.TextField(db_column='copyrightDocumentationIdentifierType', blank=True, verbose_name='Copyright document identification type') + copyrightdocumentationidentifiervalue = models.TextField(db_column='copyrightDocumentationIdentifierValue', blank=True, verbose_name='Copyright document identification value') + copyrightdocumentationidentifierrole = models.TextField(db_column='copyrightDocumentationIdentifierRole', blank=True, verbose_name='Copyright document identification role') + + class Meta: + db_table = u'RightsStatementCopyrightDocumentationIdentifier' + +class RightsStatementCopyrightNote(models.Model): + id = models.AutoField(primary_key=True, db_column='pk', editable=False) + rightscopyright = models.ForeignKey(RightsStatementCopyright, db_column='fkRightsStatementCopyrightInformation') + copyrightnote = models.TextField(db_column='copyrightNote', blank=True, verbose_name='Copyright note') + + class Meta: + db_table = u'RightsStatementCopyrightNote' + +class RightsStatementLicense(models.Model): + id = 
models.AutoField(primary_key=True, db_column='pk', editable=False) + rightsstatement = models.ForeignKey(RightsStatement, db_column='fkRightsStatement') + licenseterms = models.TextField(db_column='licenseTerms', blank=True, verbose_name='License terms') + licenseapplicablestartdate = models.TextField(db_column='licenseApplicableStartDate', blank=True, verbose_name='License start date', help_text='Use ISO 8061 (YYYY-MM-DD)') + licenseapplicableenddate = models.TextField(db_column='licenseApplicableEndDate', blank=True, verbose_name='License end date', help_text='Use ISO 8061 (YYYY-MM-DD)') + licenseenddateopen = models.BooleanField(db_column='licenseApplicableEndDateOpen', verbose_name='Open End Date', help_text='Indicate end date is open') + + class Meta: + db_table = u'RightsStatementLicense' + +class RightsStatementLicenseDocumentationIdentifier(models.Model): + id = models.AutoField(primary_key=True, db_column='pk', editable=False) + rightsstatementlicense = models.ForeignKey(RightsStatementLicense, db_column='fkRightsStatementLicense') + licensedocumentationidentifiertype = models.TextField(db_column='licenseDocumentationIdentifierType', blank=True, verbose_name='License documentation identification type') + licensedocumentationidentifiervalue = models.TextField(db_column='licenseDocumentationIdentifierValue', blank=True, verbose_name='License documentation identification value') + licensedocumentationidentifierrole = models.TextField(db_column='licenseDocumentationIdentifierRole', blank=True, verbose_name='License document identification role') + + class Meta: + db_table = u'RightsStatementLicenseDocumentationIdentifier' + +class RightsStatementLicenseNote(models.Model): + id = models.AutoField(primary_key=True, db_column='pk', editable=False) + rightsstatementlicense = models.ForeignKey(RightsStatementLicense, db_column='fkRightsStatementLicense') + licensenote = models.TextField(db_column='licenseNote', blank=True, verbose_name='License note') + + class 
Meta: + db_table = u'RightsStatementLicenseNote' + +class RightsStatementRightsGranted(models.Model): + id = models.AutoField(primary_key=True, db_column='pk') + rightsstatement = models.ForeignKey(RightsStatement, db_column='fkRightsStatement') + act = models.TextField(db_column='act', blank=True) + startdate = models.TextField(db_column='startDate', verbose_name='Start', help_text='Use ISO 8061 (YYYY-MM-DD)', blank=True) + enddate = models.TextField(db_column='endDate', verbose_name='End', help_text='Use ISO 8061 (YYYY-MM-DD)', blank=True) + enddateopen = models.BooleanField(db_column='endDateOpen', verbose_name='Open End Date', help_text='Indicate end date is open') + + class Meta: + db_table = u'RightsStatementRightsGranted' + +class RightsStatementRightsGrantedNote(models.Model): + id = models.AutoField(primary_key=True, db_column='pk', editable=False) + rightsgranted = models.ForeignKey(RightsStatementRightsGranted, db_column='fkRightsStatementRightsGranted') + rightsgrantednote = models.TextField(db_column='rightsGrantedNote', blank=True, verbose_name='Rights note') + + class Meta: + db_table = u'RightsStatementRightsGrantedNote' + +class RightsStatementRightsGrantedRestriction(models.Model): + id = models.AutoField(primary_key=True, db_column='pk') + rightsgranted = models.ForeignKey(RightsStatementRightsGranted, db_column='fkRightsStatementRightsGranted') + restriction = models.TextField(db_column='restriction', blank=True) + + class Meta: + db_table = u'RightsStatementRightsGrantedRestriction' + +class RightsStatementStatuteInformation(models.Model): + id = models.AutoField(primary_key=True, db_column='pk') + rightsstatement = models.ForeignKey(RightsStatement, db_column='fkRightsStatement') + statutejurisdiction = models.TextField(db_column='statuteJurisdiction', verbose_name='Statute jurisdiction', blank=True) + statutecitation = models.TextField(db_column='statuteCitation', verbose_name='Statute citation', blank=True) + statutedeterminationdate = 
models.TextField(db_column='statuteInformationDeterminationDate', verbose_name='Statute determination date', help_text='Use ISO 8061 (YYYY-MM-DD)', blank=True) + statuteapplicablestartdate = models.TextField(db_column='statuteApplicableStartDate', blank=True, verbose_name='Statute start date', help_text='Use ISO 8061 (YYYY-MM-DD)') + statuteapplicableenddate = models.TextField(db_column='statuteApplicableEndDate', blank=True, verbose_name='Statute end date', help_text='Use ISO 8061 (YYYY-MM-DD)') + statuteenddateopen = models.BooleanField(db_column='statuteApplicableEndDateOpen', verbose_name='Open End Date', help_text='Indicate end date is open') + + class Meta: + db_table = u'RightsStatementStatuteInformation' + +class RightsStatementStatuteInformationNote(models.Model): + id = models.AutoField(primary_key=True, db_column='pk') + rightsstatementstatute = models.ForeignKey(RightsStatementStatuteInformation, db_column='fkRightsStatementStatuteInformation') + statutenote = models.TextField(db_column='statuteNote', verbose_name='Statute note', blank=True) + + class Meta: + db_table = u'RightsStatementStatuteInformationNote' + +class RightsStatementStatuteDocumentationIdentifier(models.Model): + id = models.AutoField(primary_key=True, db_column='pk', editable=False) + rightsstatementstatute = models.ForeignKey(RightsStatementStatuteInformation, db_column='fkRightsStatementStatuteInformation') + statutedocumentationidentifiertype = models.TextField(db_column='statuteDocumentationIdentifierType', blank=True, verbose_name='Statute document identification type') + statutedocumentationidentifiervalue = models.TextField(db_column='statuteDocumentationIdentifierValue', blank=True, verbose_name='Statute document identification value') + statutedocumentationidentifierrole = models.TextField(db_column='statuteDocumentationIdentifierRole', blank=True, verbose_name='Statute document identification role') + + class Meta: + db_table = 
u'RightsStatementStatuteDocumentationIdentifier' + +class RightsStatementOtherRightsInformation(models.Model): + id = models.AutoField(primary_key=True, db_column='pk', editable=False) + rightsstatement = models.ForeignKey(RightsStatement, db_column='fkRightsStatement') + otherrightsbasis = models.TextField(db_column='otherRightsBasis', verbose_name='Other rights basis', blank=True) + otherrightsapplicablestartdate = models.TextField(db_column='otherRightsApplicableStartDate', blank=True, verbose_name='Other rights start date', help_text='Use ISO 8061 (YYYY-MM-DD)') + otherrightsapplicableenddate = models.TextField(db_column='otherRightsApplicableEndDate', blank=True, verbose_name='Other rights end date', help_text='Use ISO 8061 (YYYY-MM-DD)') + otherrightsenddateopen = models.BooleanField(db_column='otherRightsApplicableEndDateOpen', verbose_name='Open End Date', help_text='Indicate end date is open') + + class Meta: + db_table = u'RightsStatementOtherRightsInformation' + +class RightsStatementOtherRightsDocumentationIdentifier(models.Model): + id = models.AutoField(primary_key=True, db_column='pk', editable=False) + rightsstatementotherrights = models.ForeignKey(RightsStatementOtherRightsInformation, db_column='fkRightsStatementOtherRightsInformation') + otherrightsdocumentationidentifiertype = models.TextField(db_column='otherRightsDocumentationIdentifierType', blank=True, verbose_name='Other rights document identification type') + otherrightsdocumentationidentifiervalue = models.TextField(db_column='otherRightsDocumentationIdentifierValue', blank=True, verbose_name='Other right document identification value') + otherrightsdocumentationidentifierrole = models.TextField(db_column='otherRightsDocumentationIdentifierRole', blank=True, verbose_name='Other rights document identification role') + + class Meta: + db_table = u'RightsStatementOtherRightsDocumentationIdentifier' + +class RightsStatementOtherRightsInformationNote(models.Model): + id = 
models.AutoField(primary_key=True, db_column='pk') + rightsstatementotherrights = models.ForeignKey(RightsStatementOtherRightsInformation, db_column='fkRightsStatementOtherRightsInformation') + otherrightsnote = models.TextField(db_column='otherRightsNote', verbose_name='Other rights note', blank=True) + + class Meta: + db_table = u'RightsStatementOtherRightsNote' + +class RightsStatementLinkingAgentIdentifier(models.Model): + id = models.AutoField(primary_key=True, db_column='pk') + rightsstatement = models.ForeignKey(RightsStatement, db_column='fkRightsStatement') + linkingagentidentifiertype = models.TextField(db_column='linkingAgentIdentifierType', verbose_name='Linking Agent', blank=True) + linkingagentidentifiervalue = models.TextField(db_column='linkingAgentIdentifierValue', verbose_name='Linking Agent Value', blank=True) + + class Meta: + db_table = u'RightsStatementLinkingAgentIdentifier' + +class SourceDirectory(models.Model): + id = models.AutoField(primary_key=True, db_column='pk') + path = models.TextField(db_column='path') + + def __unicode__(self): + return self.path + + class Meta: + db_table = u'SourceDirectories' + +""" MCP data interoperability """ + +class MicroServiceChain(models.Model): + id = models.AutoField(primary_key=True, db_column='pk') + startinglink = models.IntegerField(db_column='startingLink') + description = models.TextField(db_column='description') + + class Meta: + db_table = u'MicroServiceChains' + +class MicroServiceChainLink(models.Model): + id = models.AutoField(primary_key=True, db_column='pk') + currenttask = models.IntegerField(db_column='currentTask') + defaultnextchainlink = models.IntegerField(null=True, default=1, db_column='defaultNextChainLink') + defaultplaysound = models.IntegerField(null=True, db_column='defaultPlaySound') + microservicegroup = models.TextField(db_column='microserviceGroup') + reloadfilelist = models.IntegerField(default=1, db_column='reloadFileList') + defaultexitmessage = 
models.TextField(default='Failed', db_column='defaultExitMessage') + + class Meta: + db_table = u'MicroServiceChainLinks' + +class MicroServiceChainLinkExitCode(models.Model): + id = models.AutoField(primary_key=True, db_column='pk') + microservicechainlink = models.IntegerField(db_column='microServiceChainLink') + exitcode = models.IntegerField(db_column='exitCode') + nextmicroservicechainlink = models.IntegerField(db_column='nextMicroServiceChainLink') + playsound = models.IntegerField(null=True, db_column='playSound') + exitmessage = models.TextField(db_column='exitMessage') + + class Meta: + db_table = u'MicroServiceChainLinksExitCodes' + +class MicroServiceChainChoice(models.Model): + id = models.AutoField(primary_key=True, db_column='pk') + choiceavailableatlink = models.IntegerField(db_column='choiceAvailableAtLink') + chainavailable = models.IntegerField(db_column='chainAvailable') + + class Meta: + db_table = u'MicroServiceChainChoice' + +class MicroServiceChoiceReplacementDic(models.Model): + id = models.AutoField(primary_key=True, db_column='pk') + choiceavailableatlink = models.IntegerField(db_column='choiceAvailableAtLink') + description = models.TextField(db_column='description', verbose_name='Description') + replacementdic = models.TextField(db_column='replacementDic', verbose_name='Configuration') + + def clean(self): + error = None + try: + config = ast.literal_eval(self.replacementdic) + except ValueError: + error = 'Invalid syntax.' + except SyntaxError: + error = 'Invalid syntax.' + if error == None and not type(config) is dict: + error = 'Invalid syntax.' 
+ if error != None: + raise forms.ValidationError(error) + + class Meta: + db_table = u'MicroServiceChoiceReplacementDic' + +class StandardTaskConfig(models.Model): + id = models.AutoField(primary_key=True, db_column='pk') + execute = models.TextField(db_column='execute', blank=True) + arguments = models.TextField(db_column='arguments', blank=True) + + class Meta: + db_table = u'StandardTasksConfigs' + +class TaskConfig(models.Model): + id = models.AutoField(primary_key=True, db_column='pk') + tasktype = models.IntegerField(db_column='taskType') + tasktypepkreference = models.IntegerField(db_column='taskTypePKReference') + description = models.TextField(db_column='description') + + class Meta: + db_table = u'TasksConfigs' diff --git a/src/dashboard/src/main/templatetags/__init__.py b/src/dashboard/src/main/templatetags/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/dashboard/src/main/templatetags/active.py b/src/dashboard/src/main/templatetags/active.py new file mode 100644 index 0000000000..9d1410ae28 --- /dev/null +++ b/src/dashboard/src/main/templatetags/active.py @@ -0,0 +1,13 @@ +from django.template import Library +import math + +register = Library() + +@register.simple_tag +def active(request, pattern): + if request.path.startswith(pattern) and pattern != '/': + return 'active' + elif request.path == pattern == '/': + return 'active' + else: + return '' diff --git a/src/dashboard/src/main/templatetags/breadcumbs.py b/src/dashboard/src/main/templatetags/breadcumbs.py new file mode 100644 index 0000000000..fcec33e327 --- /dev/null +++ b/src/dashboard/src/main/templatetags/breadcumbs.py @@ -0,0 +1,110 @@ +from django.template import loader, Node, Variable, Library +from django.utils.encoding import smart_str, smart_unicode +from django.template.defaulttags import url +from django.template import VariableDoesNotExist + +register = Library() + +@register.tag +def breadcrumb(parser, token): + """ + Renders the breadcrumb. 
+ Examples: + {% breadcrumb "Title of breadcrumb" url_var %} + {% breadcrumb context_var url_var %} + {% breadcrumb "Just the title" %} + {% breadcrumb just_context_var %} + + Parameters: + - First parameter is the title of the crumb, + - Second (optional) parameter is the url variable to link to, produced by url tag, i.e.: + {% url person_detail object.id as person_url %} + then: + {% breadcrumb person.name person_url %} + + @author Andriy Drozdyuk + """ + return BreadcrumbNode(token.split_contents()[1:]) + + +@register.tag +def breadcrumb_url(parser, token): + """ + Same as breadcrumb + but instead of url context variable takes in all the + arguments URL tag takes. + {% breadcrumb "Title of breadcrumb" person_detail person.id %} + {% breadcrumb person.name person_detail person.id %} + """ + + bits = token.split_contents() + if len(bits)==2: + return breadcrumb(parser, token) + + # Extract our extra title parameter + title = bits.pop(1) + token.contents = ' '.join(bits) + + url_node = url(parser, token) + + return UrlBreadcrumbNode(title, url_node) + + +class BreadcrumbNode(Node): + def __init__(self, vars): + self.vars = map(Variable,vars) + + def render(self, context): + title = self.vars[0].var + + if title.find("'")==-1 and title.find('"')==-1: + try: + val = self.vars[0] + title = val.resolve(context) + except: + title = '' + + else: + title=title.strip("'").strip('"') + title=smart_unicode(title) + + url = None + + if len(self.vars)>1: + val = self.vars[1] + try: + url = val.resolve(context) + except VariableDoesNotExist: + print 'URL does not exist', val + url = None + + return create_crumb(title, url) + +class UrlBreadcrumbNode(Node): + def __init__(self, title, url_node): + self.title = Variable(title) + self.url_node = url_node + + def render(self, context): + title = self.title.var + + if title.find("'")==-1 and title.find('"')==-1: + try: + val = self.title + title = val.resolve(context) + except: + title = '' + else: + 
title=title.strip("'").strip('"') + title=smart_unicode(title) + + url = self.url_node.render(context) + return create_crumb(title, url) + +def create_crumb(title, url=None): + if url: + return '
  • %s /
  • ' % (url, title) + else: + return "
  • %s
  • " % title + + return crumb diff --git a/src/dashboard/src/main/templatetags/forms.py b/src/dashboard/src/main/templatetags/forms.py new file mode 100644 index 0000000000..6a6da4e0ab --- /dev/null +++ b/src/dashboard/src/main/templatetags/forms.py @@ -0,0 +1,7 @@ +from django.template import Library + +register = Library() + +@register.filter +def is_checkbox(field): + return field.field.widget.__class__.__name__.lower() == "checkboxinput" diff --git a/src/dashboard/src/main/templatetags/math.py b/src/dashboard/src/main/templatetags/math.py new file mode 100644 index 0000000000..5433fac1d7 --- /dev/null +++ b/src/dashboard/src/main/templatetags/math.py @@ -0,0 +1,10 @@ +from django.template import Node, Library +import math + +register = Library() + +@register.filter +def math(lopr, expr): + if lopr: + return eval(expr.replace('$1', str(lopr)), {"__builtins__": None}) + return '' diff --git a/src/dashboard/src/main/templatetags/percentage.py b/src/dashboard/src/main/templatetags/percentage.py new file mode 100644 index 0000000000..16c7472a9a --- /dev/null +++ b/src/dashboard/src/main/templatetags/percentage.py @@ -0,0 +1,11 @@ +from django.template import Node, Library + +register = Library() + +@register.filter +def percentage(value, total): + try: + percentage = float(value) / float(total) * 100 + except ZeroDivisionError: + percentage = 0 + return '{2:.3g}%'.format(value, total, percentage) diff --git a/src/dashboard/src/main/urls.py b/src/dashboard/src/main/urls.py new file mode 100644 index 0000000000..799703ac09 --- /dev/null +++ b/src/dashboard/src/main/urls.py @@ -0,0 +1,102 @@ +from django.conf.urls.defaults import * +from django.conf import settings +from django.views.generic.simple import direct_to_template, redirect_to + +UUID_REGEX = '[\w]{8}(-[\w]{4}){3}-[\w]{12}' + +urlpatterns = patterns('main.views', + + # Index + (r'^$', 'home'), + + # Forbidden + (r'forbidden/$', 'forbidden'), + + # Transfer + (r'transfer/$', 'transfer_grid'), + 
(r'transfer/(?P' + UUID_REGEX + ')/$', 'transfer_detail'), + (r'transfer/(?P' + UUID_REGEX + ')/delete/$', 'transfer_delete'), + (r'transfer/(?P' + UUID_REGEX + ')/microservices/$', 'transfer_microservices'), + (r'transfer/(?P' + UUID_REGEX + ')/rights/$', 'transfer_rights_list'), + (r'transfer/(?P' + UUID_REGEX + ')/rights/add/$', 'transfer_rights_edit'), + (r'transfer/(?P' + UUID_REGEX + ')/rights/(?P\d+)/$', 'transfer_rights_edit'), + (r'transfer/(?P' + UUID_REGEX + ')/rights/delete/(?P\d+)/$', 'transfer_rights_delete'), + (r'transfer/(?P' + UUID_REGEX + ')/rights/grants/(?P\d+)/$', 'transfer_rights_grants_edit'), + (r'transfer/status/$', 'transfer_status'), + (r'transfer/status/(?P' + UUID_REGEX + ')/$', 'transfer_status'), + (r'transfer/select/(?P\d+)/$', 'transfer_select'), + (r'transfer/browser/$', 'transfer_browser'), + + # Ingest + (r'ingest/$', 'ingest_grid'), + (r'ingest/(?P' + UUID_REGEX + ')/$', 'ingest_detail'), + (r'ingest/(?P' + UUID_REGEX + ')/delete/$', 'ingest_delete'), + (r'ingest/(?P' + UUID_REGEX + ')/metadata/$', 'ingest_metadata_list'), + (r'ingest/(?P' + UUID_REGEX + ')/metadata/add/$', 'ingest_metadata_edit'), + (r'ingest/(?P' + UUID_REGEX + ')/metadata/(?P\d+)/$', 'ingest_metadata_edit'), + (r'ingest/(?P' + UUID_REGEX + ')/metadata/delete/(?P\d+)/$', 'ingest_metadata_delete'), + (r'ingest/(?P' + UUID_REGEX + ')/microservices/$', 'ingest_microservices'), + (r'ingest/(?P' + UUID_REGEX + ')/rights/$', 'ingest_rights_list'), + (r'ingest/(?P' + UUID_REGEX + ')/rights/add/$', 'ingest_rights_edit'), + (r'ingest/(?P' + UUID_REGEX + ')/rights/(?P\d+)/$', 'ingest_rights_edit'), + (r'ingest/(?P' + UUID_REGEX + ')/rights/delete/(?P\d+)/$', 'ingest_rights_delete'), + (r'ingest/(?P' + UUID_REGEX + ')/rights/grants/(?P\d+)/$', 'ingest_rights_grants_edit'), + (r'ingest/(?P' + UUID_REGEX + ')/upload/$', 'ingest_upload'), + (r'ingest/status/$', 'ingest_status'), + (r'ingest/status/(?P' + UUID_REGEX + ')/$', 'ingest_status'), + 
(r'ingest/normalization-report/(?P' + UUID_REGEX + ')/$', 'ingest_normalization_report'), + (r'ingest/preview/aip/(?P' + UUID_REGEX + ')/$', 'ingest_browse_aip'), + (r'ingest/preview/normalization/(?P' + UUID_REGEX + ')/$', 'ingest_browse_normalization'), + + # Jobs and tasks (is part of ingest) + (r'jobs/(?P' + UUID_REGEX + ')/explore/$', 'jobs_explore'), + (r'jobs/(?P' + UUID_REGEX + ')/list-objects/$', 'jobs_list_objects'), + (r'tasks/(?P' + UUID_REGEX + ')/$', 'tasks'), + (r'task/(?P' + UUID_REGEX + ')/$', 'task'), + + # Access + (r'access/$', 'access_list'), + (r'access/(?P\d+)/delete/$', 'access_delete'), + + # Lookup + (r'lookup/rightsholder/(?P\d+)/$', 'rights_holders_lookup'), + + # Autocomplete + (r'autocomplete/rightsholders$', 'rights_holders_autocomplete'), + + # Administration + (r'administration/$', 'administration'), + #(r'administration/edit/(?P\d+)/$', 'administration_edit'), + (r'administration/dip/$', 'administration_dip'), + (r'administration/dip/edit/(?P\d+)/$', 'administration_dip_edit'), + (r'administration/dips/atom/$', 'administration_atom_dips'), + (r'administration/dips/contentdm/$', 'administration_contentdm_dips'), + (r'administration/sources/$', 'administration_sources'), + (r'administration/sources/delete/json/(?P\d+)/$', 'administration_sources_delete_json'), + (r'administration/processing/$', 'administration_processing'), + (r'administration/sources/json/$', 'administration_sources_json'), + + # Disabled until further development can be done + #(r'administration/search/$', 'administration_search'), + #(r'administration/search/flush/aips/$', 'administration_search_flush_aips'), + + # JSON feeds + (r'status/$', 'status'), + (r'formdata/(?P\w+)/(?P\d+)/(?P\d+)/$', 'formdata_delete'), + (r'formdata/(?P\w+)/(?P\d+)/$', 'formdata'), +) + +# Filesystem related JSON views +urlpatterns += patterns('main.filesystem', + (r'filesystem/download/$', 'download'), + (r'filesystem/contents/$', 'contents'), + (r'filesystem/children/$', 
'directory_children'), + + (r'filesystem/delete/$', 'delete'), + (r'filesystem/copy_to_originals/$', 'copy_to_originals'), + (r'filesystem/copy_to_arrange/$', 'copy_to_arrange'), + (r'filesystem/copy_transfer_component/$', 'copy_transfer_component'), + (r'filesystem/get_temp_directory/$', 'get_temp_directory'), + (r'filesystem/ransfer/$', 'copy_to_start_transfer'), + (r'filesystem/copy_from_arrange/$', 'copy_from_arrange_to_completed') +) diff --git a/src/dashboard/src/main/views.py b/src/dashboard/src/main/views.py new file mode 100644 index 0000000000..95fb7e4a50 --- /dev/null +++ b/src/dashboard/src/main/views.py @@ -0,0 +1,1482 @@ +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . 
+ +from django.db.models import Max +from django.conf import settings as django_settings +from django.core.exceptions import ObjectDoesNotExist +from django.core.urlresolvers import reverse +from django.core.paginator import Paginator, InvalidPage, EmptyPage +from django.db import connection, transaction +from django.forms.models import modelformset_factory, inlineformset_factory +from django.shortcuts import render_to_response, get_object_or_404, redirect, render +from django.http import Http404, HttpResponse, HttpResponseBadRequest, HttpResponseRedirect +from django.utils import simplejson +from django.template import RequestContext +from django.utils.dateformat import format +from views_NormalizationReport import getNormalizationReportQuery +from contrib.mcp.client import MCPClient +from contrib import utils +from main import forms +from main import models +from main import filesystem +from lxml import etree +from lxml import objectify +import calendar +import cPickle +from datetime import datetime +import os +import re +import subprocess +import sys +sys.path.append("/usr/lib/archivematica/archivematicaCommon/externals") +import pyes +from django.contrib.auth.decorators import user_passes_test +import urllib +import components.decorators as decorators + +# Used for raw SQL queries to return data in dictionaries instead of lists +def dictfetchall(cursor): + "Returns all rows from a cursor as a dict" + desc = cursor.description + return [ + dict(zip([col[0] for col in desc], row)) + for row in cursor.fetchall() + ] + +""" @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ + Home + @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ """ + +def home(request): + return HttpResponseRedirect(reverse('main.views.transfer_grid')) + +""" @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ + Status + @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ """ + +# TODO: hide removed elements +def status(request): + client = MCPClient() + xml = etree.XML(client.list()) + + sip_count = 
len(xml.xpath('//choicesAvailableForUnits/choicesAvailableForUnit/unit/type[text()="SIP"]')) + transfer_count = len(xml.xpath('//choicesAvailableForUnits/choicesAvailableForUnit/unit/type[text()="Transfer"]')) + dip_count = len(xml.xpath('//choicesAvailableForUnits/choicesAvailableForUnit/unit/type[text()="DIP"]')) + + response = {'sip': sip_count, 'transfer': transfer_count, 'dip': dip_count} + + return HttpResponse(simplejson.JSONEncoder().encode(response), mimetype='application/json') + +""" @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ + Rights-related + @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ """ + +def rights_parse_agent_id(input): + return 0 + if input == '': + agentId = 0 + else: + agentRaw = input + try: + int(agentRaw) + agentId = int(agentRaw) + except ValueError: + agentRe = re.compile('(.*)\[(\d*)\]') + match = agentRe.match(agentRaw) + if match: + agentId = match.group(2) + else: + agentId = 0 + return agentId + +def rights_edit(request, uuid, id=None, section='ingest'): + jobs = models.Job.objects.filter(sipuuid=uuid) + name = utils.get_directory_name(jobs[0]) + + # flag indicating what kind of new content, if any, has been created + new_content_type_created = None + + max_notes = 1 + + if id: + viewRights = models.RightsStatement.objects.get(pk=id) + agentId = None + if request.method == 'POST': + postData = request.POST.copy() + """ + agentId = rights_parse_agent_id(postData.get('rightsholder')) + if agentId == 0 and postData.get('rightsholder') != '0' and postData.get('rightsholder') != '': + agent = models.RightsStatementLinkingAgentIdentifier() + agent.rightsstatement = viewRights + agent.linkingagentidentifiervalue = postData.get('rightsholder') + agent.save() + agentId = agent.id + postData.__setitem__('rightsholder', agentId) + """ + form = forms.RightsForm(postData, instance=viewRights) + form.cleaned_data = postData + viewRights = form.save() + else: + form = forms.RightsForm(instance=viewRights) + form.cleaned_data = viewRights + form.save() + + # 
determine how many empty forms should be shown for children + extra_copyright_forms = max_notes - models.RightsStatementCopyright.objects.filter(rightsstatement=viewRights).count() + extra_statute_forms = max_notes - models.RightsStatementStatuteInformation.objects.filter(rightsstatement=viewRights).count() + extra_license_forms = max_notes - models.RightsStatementLicense.objects.filter(rightsstatement=viewRights).count() + extra_other_forms = max_notes - models.RightsStatementOtherRightsInformation.objects.filter(rightsstatement=viewRights).count() + else: + if request.method == 'POST': + postData = request.POST.copy() + agentId = rights_parse_agent_id(postData.get('rightsholder')) + postData.__setitem__('rightsholder', agentId) + form = forms.RightsForm(postData) + else: + form = forms.RightsForm() + viewRights = models.RightsStatement() + + extra_copyright_forms = max_notes + extra_statute_forms = max_notes + extra_license_forms = max_notes + extra_license_notes = max_notes + extra_other_forms = max_notes + + # create inline formsets for child elements + CopyrightFormSet = inlineformset_factory( + models.RightsStatement, + models.RightsStatementCopyright, + extra=extra_copyright_forms, + can_delete=False, + form=forms.RightsCopyrightForm + ) + + StatuteFormSet = inlineformset_factory( + models.RightsStatement, + models.RightsStatementStatuteInformation, + extra=extra_statute_forms, + can_delete=False, + form=forms.RightsStatuteForm + ) + + LicenseFormSet = inlineformset_factory( + models.RightsStatement, + models.RightsStatementLicense, + extra=extra_license_forms, + can_delete=False, + form=forms.RightsLicenseForm + ) + + OtherFormSet = inlineformset_factory( + models.RightsStatement, + models.RightsStatementOtherRightsInformation, + extra=extra_other_forms, + can_delete=False, + form=forms.RightsOtherRightsForm + ) + + # handle form creation/saving + if request.method == 'POST': + if id: + createdRights = viewRights + else: + createdRights = form.save() + 
sectionTypeID = {'transfer': 2, 'ingest': 1} + createdRights.metadataappliestotype = sectionTypeID[section] + createdRights.metadataappliestoidentifier = uuid + createdRights.save() + + copyrightFormset = CopyrightFormSet(request.POST, instance=createdRights) + createdCopyrightSet = copyrightFormset.save() + + # if copyright has been created, do a reload and inform user - MIGHT NOT NEED TO DO THIS NOW + #if request.POST.get('copyright_previous_pk', '') == 'None' and createdCopyright: + # new_content_type_created = 'copyright' + + # establish whether or not there is a copyright information instance to use as a parent + if len(createdCopyrightSet) == 1: + createdCopyright = createdCopyrightSet[0] + else: + createdCopyright = False + + # handle creation of new copyright notes, creating parent if necessary + if request.POST.get('copyright_note', '') != '': + # make new copyright record if it doesn't exist + if not createdCopyright: + try: + createdCopyright = models.RightsStatementCopyright.objects.get(rightsstatement=createdRights) + except: + createdCopyright = models.RightsStatementCopyright(rightsstatement=createdRights) + createdCopyright.save() + + copyrightNote = models.RightsStatementCopyrightNote(rightscopyright=createdCopyright) + copyrightNote.copyrightnote = request.POST.get('copyright_note', '') + copyrightNote.save() + + new_content_type_created = 'copyright' + + # handle creation of new documentation identifiers + if request.POST.get('copyright_documentation_identifier_type', '') != '' or request.POST.get('copyright_documentation_identifier_value', '') != '' or request.POST.get('copyright_documentation_identifier_role', ''): + # make new copyright record if it doesn't exist + if not createdCopyright: + try: + createdCopyright = models.RightsStatementCopyright.objects.get(rightsstatement=createdRights) + except: + createdCopyright = models.RightsStatementCopyright(rightsstatement=createdRights) + createdCopyright.save() + + copyrightDocIdentifier = 
models.RightsStatementCopyrightDocumentationIdentifier(rightscopyright=createdCopyright) + copyrightDocIdentifier.copyrightdocumentationidentifiertype = request.POST.get('copyright_documentation_identifier_type', '') + copyrightDocIdentifier.copyrightdocumentationidentifiervalue = request.POST.get('copyright_documentation_identifier_value', '') + copyrightDocIdentifier.copyrightdocumentationidentifierrole = request.POST.get('copyright_documentation_identifier_role', '') + copyrightDocIdentifier.save() + + new_content_type_created = 'copyright' + + licenseFormset = LicenseFormSet(request.POST, instance=createdRights) + createdLicenseSet = licenseFormset.save() + #if request.POST.get('license_previous_pk', '') == 'None' and len(createdLicense) == 1: + # new_content_type_created = 'license' + + # establish whether or not there is a license instance to use as a parent + if len(createdLicenseSet) == 1: + createdLicense = createdLicenseSet[0] + else: + createdLicense = False + + # handle creation of new copyright notes, creating parent if necessary + if request.POST.get('license_note', '') != '': + # make new copyright record if it doesn't exist + if not createdLicense: + try: + createdLicense = models.RightsStatementLicense.objects.get(rightsstatement=createdRights) + except: + createdLicense = models.RightsStatementLicense(rightsstatement=createdRights) + createdLicense.save() + + licenseNote = models.RightsStatementLicenseNote(rightsstatementlicense=createdLicense) + licenseNote.licensenote = request.POST.get('license_note', '') + licenseNote.save() + + new_content_type_created = 'license' + + # handle creation of new documentation identifiers + if request.POST.get('license_documentation_identifier_type', '') != '' or request.POST.get('license_documentation_identifier_value', '') != '' or request.POST.get('license_documentation_identifier_role', ''): + # make new license record if it doesn't exist + if not createdLicense: + try: + createdLicense = 
models.RightsStatementLicense.objects.get(rightsstatement=createdRights) + except: + createdLicense = models.RightsStatementLicense(rightsstatement=createdRights) + createdLicense.save() + + licenseDocIdentifier = models.RightsStatementLicenseDocumentationIdentifier(rightsstatementlicense=createdLicense) + licenseDocIdentifier.licensedocumentationidentifiertype = request.POST.get('license_documentation_identifier_type', '') + licenseDocIdentifier.licensedocumentationidentifiervalue = request.POST.get('license_documentation_identifier_value', '') + licenseDocIdentifier.licensedocumentationidentifierrole = request.POST.get('license_documentation_identifier_role', '') + licenseDocIdentifier.save() + + new_content_type_created = 'license' + + statuteFormset = StatuteFormSet(request.POST, instance=createdRights) + createdStatuteSet = statuteFormset.save() + if request.POST.get('statute_previous_pk', '') == 'None' and len(createdStatuteSet) == 1: + new_content_type_created = 'statute' + + #restrictionCreated = False + noteCreated = False + for form in statuteFormset.forms: + statuteCreated = False + + # handle documentation identifier creation for a parent that's a blank statute + if (request.POST.get('statute_documentation_identifier_type_None', '') != '' or request.POST.get('statute_documentation_identifier_value_None', '') != '' or request.POST.get('statute_documentation_identifier_role_None', '') != ''): + if form.instance.pk: + statuteCreated = form.instance + else: + statuteCreated = models.RightsStatementStatuteInformation(rightsstatement=createdRights) + statuteCreated.save() + + statuteDocIdentifier = models.RightsStatementStatuteDocumentationIdentifier(rightsstatementstatute=statuteCreated) + statuteDocIdentifier.statutedocumentationidentifiertype = request.POST.get('statute_documentation_identifier_type_None', '') + statuteDocIdentifier.statutedocumentationidentifiervalue = request.POST.get('statute_documentation_identifier_value_None', '') + 
statuteDocIdentifier.statutedocumentationidentifierrole = request.POST.get('statute_documentation_identifier_role_None', '') + statuteDocIdentifier.save() + new_content_type_created = 'statute' + else: + # handle documentation identifier creation for a parent statute that already exists + if request.POST.get('statute_documentation_identifier_type_' + str(form.instance.pk), '') != '' or request.POST.get('statute_documentation_identifier_value_' + str(form.instance.pk), '') or request.POST.get('statute_documentation_identifier_role_' + str(form.instance.pk), ''): + statuteDocIdentifier = models.RightsStatementStatuteDocumentationIdentifier(rightsstatementstatute=form.instance) + statuteDocIdentifier.statutedocumentationidentifiertype = request.POST.get('statute_documentation_identifier_type_' + str(form.instance.pk), '') + statuteDocIdentifier.statutedocumentationidentifiervalue = request.POST.get('statute_documentation_identifier_value_' + str(form.instance.pk), '') + statuteDocIdentifier.statutedocumentationidentifierrole = request.POST.get('statute_documentation_identifier_role_' + str(form.instance.pk), '') + statuteDocIdentifier.save() + new_content_type_created = 'statute' + + # handle note creation for a parent that's a blank grant + if request.POST.get('new_statute_note_None', '') != '' and not form.instance.pk: + if not statuteCreated: + statuteCreated = models.RightsStatementStatuteInformation(rightsstatement=createdRights) + statuteCreated.save() + noteCreated = models.RightsStatementStatuteInformationNote(rightsstatementstatute=statuteCreated) + noteCreated.statutenote = request.POST.get('new_statute_note_None', '') + noteCreated.save() + new_content_type_created = 'statue' + else: + # handle note creation for a parent grant that already exists + if request.POST.get('new_statute_note_' + str(form.instance.pk), '') != '': + noteCreated = models.RightsStatementStatuteInformationNote(rightsstatementstatute=form.instance) + noteCreated.statutenote = 
request.POST.get('new_statute_note_' + str(form.instance.pk), '') + noteCreated.save() + new_content_type_created = 'statute' + + # handle note creation for a parent that's just been created + if request.POST.get('new_statute_note_None', '') != '' and not noteCreated: + noteCreated = models.RightsStatementStatuteInformationNote(rightsstatementstatute=form.instance) + noteCreated.statutenote = request.POST.get('new_statute_note_None', '') + noteCreated.save() + + # display (possibly revised) formset + statuteFormset = StatuteFormSet(instance=createdRights) + + otherFormset = OtherFormSet(request.POST, instance=createdRights) + createdOtherSet = otherFormset.save() + #if request.POST.get('other_previous_pk', '') == 'None' and len(createdOther) == 1: + # new_content_type_created = createdRights.rightsbasis + + # establish whether or not there is an "other" instance to use as a parent + if len(createdOtherSet) == 1: + createdOther = createdOtherSet[0] + else: + createdOther = False + + # handle creation of new "other" notes, creating parent if necessary + if request.POST.get('otherrights_note', '') != '': + # make new "other" record if it doesn't exist + if not createdOther: + try: + createdOther = models.RightsStatementOtherRightsInformation.objects.get(rightsstatement=createdRights) + except: + createdOther = models.RightsStatementOtherRightsInformation(rightsstatement=createdRights) + createdOther.save() + + otherNote = models.RightsStatementOtherRightsInformationNote(rightsstatementotherrights=createdOther) + otherNote.otherrightsnote = request.POST.get('otherrights_note', '') + otherNote.save() + + new_content_type_created = 'other' + + # handle creation of new documentation identifiers + if request.POST.get('other_documentation_identifier_type', '') != '' or request.POST.get('other_documentation_identifier_value', '') != '' or request.POST.get('other_documentation_identifier_role', ''): + # make new other record if it doesn't exist + if not createdOther: + try: + 
createdOther = models.RightsStatementOtherRightsInformation.objects.get(rightsstatement=createdRights) + except: + createdOther = models.RightsStatementOtherRightsInformation(rightsstatement=createdRights) + createdOther.save() + + otherDocIdentifier = models.RightsStatementOtherRightsDocumentationIdentifier(rightsstatementotherrights=createdOther) + otherDocIdentifier.otherrightsdocumentationidentifiertype = request.POST.get('other_documentation_identifier_type', '') + otherDocIdentifier.otherrightsdocumentationidentifiervalue = request.POST.get('other_documentation_identifier_value', '') + otherDocIdentifier.otherrightsdocumentationidentifierrole = request.POST.get('other_documentation_identifier_role', '') + otherDocIdentifier.save() + + new_content_type_created = 'other' + + if request.POST.get('next_button', '') != None and request.POST.get('next_button', '') != '': + return HttpResponseRedirect( + reverse('main.views.%s_rights_grants_edit' % section, args=[uuid, createdRights.pk]) + ) + else: + url = reverse('main.views.%s_rights_edit' % section, args=[uuid, createdRights.pk]) + try: + new_content_type_created + url = url + '?created=' + new_content_type_created + except: + pass + return HttpResponseRedirect(url) + else: + copyrightFormset = CopyrightFormSet(instance=viewRights) + statuteFormset = StatuteFormSet(instance=viewRights) + licenseFormset = LicenseFormSet(instance=viewRights) + otherFormset = OtherFormSet(instance=viewRights) + + # show what content's been created after a redirect + if request.GET.get('created', '') != '': + new_content_type_created = request.GET.get('created', '') + + return render(request, 'main/rights_edit.html', locals()) + +def rights_grants_edit(request, uuid, id, section='ingest'): + jobs = models.Job.objects.filter(sipuuid=uuid) + name = utils.get_directory_name(jobs[0]) + + viewRights = models.RightsStatement.objects.get(pk=id) + + # determine how many empty forms should be shown for children + extra_grant_forms = 1 + + # 
create inline formsets for child elements + GrantFormSet = inlineformset_factory( + models.RightsStatement, + models.RightsStatementRightsGranted, + extra=extra_grant_forms, + can_delete=False, + form=forms.RightsGrantedForm + ) + + # handle form creation/saving + if request.method == 'POST': + grantFormset = GrantFormSet(request.POST, instance=viewRights) + grantFormset.save() + restrictionCreated = False + noteCreated = False + for form in grantFormset.forms: + grantCreated = False + + # handle restriction creation for a parent that's a blank grant + if request.POST.get('new_rights_restriction_None', '') != '' and not form.instance.pk: + grantCreated = models.RightsStatementRightsGranted(rightsstatement=viewRights) + grantCreated.save() + restrictionCreated = models.RightsStatementRightsGrantedRestriction(rightsgranted=grantCreated) + restrictionCreated.restriction = request.POST.get('new_rights_restriction_None', '') + restrictionCreated.save() + else: + # handle restriction creation for a parent grant that already exists + if request.POST.get('new_rights_restriction_' + str(form.instance.pk), '') != '': + restrictionCreated = models.RightsStatementRightsGrantedRestriction(rightsgranted=form.instance) + restrictionCreated.restriction = request.POST.get('new_rights_restriction_' + str(form.instance.pk), '') + restrictionCreated.save() + + # handle note creation for a parent that's a blank grant + if request.POST.get('new_rights_note_None', '') != '' and not form.instance.pk: + if not grantCreated: + grantCreated = models.RightsStatementRightsGranted(rightsstatement=viewRights) + grantCreated.save() + noteCreated = models.RightsStatementRightsGrantedNote(rightsgranted=grantCreated) + noteCreated.rightsgrantednote = request.POST.get('new_rights_note_None', '') + noteCreated.save() + else: + # handle note creation for a parent grant that already exists + if request.POST.get('new_rights_note_' + str(form.instance.pk), '') != '': + noteCreated = 
models.RightsStatementRightsGrantedNote(rightsgranted=form.instance) + noteCreated.rightsgrantednote = request.POST.get('new_rights_note_' + str(form.instance.pk), '') + noteCreated.save() + + # handle restriction creation for a parent that's just been created + if request.POST.get('new_rights_restriction_None', '') != '' and not restrictionCreated: + restrictionCreated = models.RightsStatementRightsGrantedRestriction(rightsgranted=form.instance) + restrictionCreated.restriction = request.POST.get('new_rights_restriction_None', '') + restrictionCreated.save() + + # handle note creation for a parent that's just been created + if request.POST.get('new_rights_note_None', '') != '' and not noteCreated: + noteCreated = models.RightsStatementRightsGrantedNote(rightsgranted=form.instance) + noteCreated.rightsgrantednote = request.POST.get('new_rights_note_None', '') + noteCreated.save() + + # display (possibly revised) formset + grantFormset = GrantFormSet(instance=viewRights) + + if request.method == 'POST': + if request.POST.get('next_button', '') != None and request.POST.get('next_button', '') != '': + return HttpResponseRedirect(reverse('main.views.%s_rights_list' % section, args=[uuid])) + else: + url = reverse('main.views.%s_rights_grants_edit' % section, args=[uuid, viewRights.pk]) + try: + new_content_type_created + url = url + '?created=' + new_content_type_created + except: + pass + return HttpResponseRedirect(url) + else: + return render(request, 'main/rights_grants_edit.html', locals()) + +def rights_delete(request, uuid, id, section): + models.RightsStatement.objects.get(pk=id).delete() + return HttpResponseRedirect(reverse('main.views.%s_rights_list' % section, args=[uuid])) + +def rights_holders_lookup(request, id): + try: + agent = models.RightsStatementLinkingAgentIdentifier.objects.get(pk=id) + result = agent.linkingagentidentifiervalue + ' [' + str(agent.id) + ']' + except: + result = '' + return HttpResponse(result) + +def 
rights_holders_autocomplete(request): + + search_text = '' + + try: + search_text = request.REQUEST['text'] + except Exception: pass + + response_data = {} + + agents = models.RightsStatementLinkingAgentIdentifier.objects.filter(linkingagentidentifiervalue__icontains=search_text) + for agent in agents: + value = agent.linkingagentidentifiervalue + ' [' + str(agent.id) + ']' + response_data[value] = value + + return HttpResponse(simplejson.dumps(response_data), mimetype='application/json') + +def rights_list(request, uuid, section): + jobs = models.Job.objects.filter(sipuuid=uuid) + name = utils.get_directory_name(jobs[0]) + + # See MetadataAppliesToTypes table + types = { 'ingest': 1, 'transfer': 2, 'file': 3 } + + grants = models.RightsStatementRightsGranted.objects.filter( + rightsstatement__metadataappliestotype__exact=types[section], + rightsstatement__metadataappliestoidentifier__exact=uuid + ) + + # create result list that incorporates multiple restriction records + modifiedGrants = [] + for grant in grants: + item = { + 'act': grant.act, + 'basis': grant.rightsstatement.rightsbasis, + 'restrictions': [], + 'startdate': grant.startdate, + 'enddate': grant.enddate, + 'rightsstatement': grant.rightsstatement + } + + if (grant.enddateopen): + item['enddate'] = '(open)' + + restriction_data = models.RightsStatementRightsGrantedRestriction.objects.filter(rightsgranted=grant) + restrictions = [] + for restriction in restriction_data: + #return HttpResponse(restriction.restriction) + restrictions.append(restriction.restriction) + item['restrictions'] = restrictions + + modifiedGrants.append(item) + grants = modifiedGrants + + # When listing ingest rights we also want to show transfer rights + # The only way I've found to get the related transfer of a SIP is looking into the File table + if section is "ingest": + try: + transfer_uuid = models.File.objects.filter(sip__uuid__exact=uuid)[0].transfer.uuid + transfer_grants = 
models.RightsStatementRightsGranted.objects.filter( + rightsstatement__metadataappliestotype__exact=types['transfer'], + rightsstatement__metadataappliestoidentifier__exact=transfer_uuid + ) + except: + pass + + return render(request, 'main/rights_list.html', locals()) + +""" @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ + Ingest + @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ """ + +def ingest_grid(request): + polling_interval = django_settings.POLLING_INTERVAL + microservices_help = django_settings.MICROSERVICES_HELP + return render(request, 'main/ingest/grid.html', locals()) + +def ingest_status(request, uuid=None): + # Equivalent to: "SELECT SIPUUID, MAX(createdTime) AS latest FROM Jobs WHERE unitType='unitSIP' GROUP BY SIPUUID + objects = models.Job.objects.filter(hidden=False, subjobof='').values('sipuuid').annotate(timestamp=Max('createdtime')).exclude(sipuuid__icontains = 'None').filter(unittype__exact = 'unitSIP') + mcp_available = False + try: + client = MCPClient() + mcp_status = etree.XML(client.list()) + mcp_available = True + except Exception: pass + def encoder(obj): + items = [] + for item in obj: + # Check if hidden (TODO: this method is slow) + if models.SIP.objects.is_hidden(item['sipuuid']): + continue + jobs = get_jobs_by_sipuuid(item['sipuuid']) + item['directory'] = utils.get_directory_name(jobs[0]) + item['timestamp'] = calendar.timegm(item['timestamp'].timetuple()) + item['uuid'] = item['sipuuid'] + item['id'] = item['sipuuid'] + del item['sipuuid'] + item['jobs'] = [] + for job in jobs: + newJob = {} + item['jobs'].append(newJob) + + # allow user to know name of file that has failed normalization + if job.jobtype == 'Access normalization failed - copying' or job.jobtype == 'Preservation normalization failed - copying' or job.jobtype == 'thumbnail normalization failed - copying': + task = models.Task.objects.get(job=job) + newJob['filename'] = task.filename + + newJob['uuid'] = job.jobuuid + newJob['type'] = job.jobtype #map_known_values(job.jobtype) + 
newJob['microservicegroup'] = job.microservicegroup + newJob['subjobof'] = job.subjobof + newJob['currentstep'] = job.currentstep #map_known_values(job.currentstep) + newJob['timestamp'] = '%d.%s' % (calendar.timegm(job.createdtime.timetuple()), str(job.createdtimedec).split('.')[-1]) + try: mcp_status + except NameError: pass + else: + xml_unit = mcp_status.xpath('choicesAvailableForUnit[UUID="%s"]' % job.jobuuid) + if xml_unit: + xml_unit_choices = xml_unit[0].findall('choices/choice') + choices = {} + for choice in xml_unit_choices: + choices[choice.find("chainAvailable").text] = choice.find("description").text + newJob['choices'] = choices + items.append(item) + return items + response = {} + response['objects'] = objects + response['mcp'] = mcp_available + return HttpResponse(simplejson.JSONEncoder(default=encoder).encode(response), mimetype='application/json') + +@decorators.load_jobs # Adds jobs, name +def ingest_metadata_list(request, uuid, jobs, name): + # See MetadataAppliesToTypes table + # types = { 'ingest': 1, 'transfer': 2, 'file': 3 } + metadata = models.DublinCore.objects.filter(metadataappliestotype__exact=1, metadataappliestoidentifier__exact=uuid) + + return render(request, 'main/ingest/metadata_list.html', locals()) + +def ingest_metadata_edit(request, uuid, id=None): + if id: + dc = models.DublinCore.objects.get(pk=id) + else: + # Right now we only support linking metadata to the Ingest + try: + dc = models.DublinCore.objects.get_sip_metadata(uuid) + return HttpResponseRedirect(reverse('main.views.ingest_metadata_edit', args=[uuid, dc.id])) + except ObjectDoesNotExist: + dc = models.DublinCore(metadataappliestotype=1, metadataappliestoidentifier=uuid) + + fields = ['title', 'creator', 'subject', 'description', 'publisher', + 'contributor', 'date', 'type', 'format', 'identifier', + 'source', 'relation', 'language', 'coverage', 'rights'] + + if request.method == 'POST': + form = forms.DublinCoreMetadataForm(request.POST) + if form.is_valid(): + 
for item in fields: + setattr(dc, item, form.cleaned_data[item]) + dc.save() + return HttpResponseRedirect(reverse('main.views.ingest_metadata_list', args=[uuid])) + else: + initial = {} + for item in fields: + initial[item] = getattr(dc, item) + form = forms.DublinCoreMetadataForm(initial=initial) + jobs = models.Job.objects.filter(sipuuid=uuid) + name = utils.get_directory_name(jobs[0]) + + return render(request, 'main/ingest/metadata_edit.html', locals()) + +def ingest_metadata_delete(request, uuid, id): + try: + models.DublinCore.objects.get(pk=id).delete() + return HttpResponseRedirect(reverse('main.views.ingest_detail', args=[uuid])) + except: + raise Http404 + +def ingest_detail(request, uuid): + jobs = models.Job.objects.filter(sipuuid=uuid) + is_waiting = jobs.filter(currentstep='Awaiting decision').count() > 0 + name = utils.get_directory_name(jobs[0]) + return render(request, 'main/ingest/detail.html', locals()) + +def ingest_microservices(request, uuid): + jobs = models.Job.objects.filter(sipuuid=uuid) + name = utils.get_directory_name(jobs[0]) + return render(request, 'main/ingest/microservices.html', locals()) + +def ingest_rights_list(request, uuid): + return rights_list(request, uuid, 'ingest') + +def ingest_rights_edit(request, uuid, id=None): + return rights_edit(request, uuid, id, 'ingest') + +def ingest_rights_delete(request, uuid, id): + return rights_delete(request, uuid, id, 'ingest') + +def ingest_rights_grants_edit(request, uuid, id): + return rights_grants_edit(request, uuid, id, 'ingest') + +def ingest_delete(request, uuid): + try: + sip = models.SIP.objects.get(uuid__exact=uuid) + sip.hidden = True + sip.save() + response = simplejson.JSONEncoder().encode({ 'removed': True }) + return HttpResponse(response, mimetype='application/json') + except: + raise Http404 + +def ingest_upload(request, uuid): + """ + The upload DIP is actually not executed here, but some data is storaged + in the database (permalink, ...), used later by 
upload-qubit.py + - GET = It could be used to obtain DIP size + - POST = Create Accesses tuple with permalink + """ + try: + sip = models.SIP.objects.get(uuid__exact=uuid) + except: + raise Http404 + + if request.method == 'POST': + if 'target' in request.POST: + try: + access = models.Access.objects.get(sipuuid=uuid) + except: + access = models.Access(sipuuid=uuid) + access.target = cPickle.dumps({ + "target": request.POST['target'], + "intermediate": request.POST['intermediate'] == "true" }) + access.save() + response = simplejson.JSONEncoder().encode({ 'ready': True }) + return HttpResponse(response, mimetype='application/json') + elif request.method == 'GET': + try: + access = models.Access.objects.get(sipuuid=uuid) + data = cPickle.loads(str(access.target)) + except: + # pass + raise Http404 + # Disabled, it could be very slow + # job = models.Job.objects.get(jobtype='Upload DIP', sipuuid=uuid) + # data['size'] = utils.get_directory_size(job.directory) + response = simplejson.JSONEncoder().encode(data) + return HttpResponse(response, mimetype='application/json') + + return HttpResponseBadRequest() + + +def ingest_normalization_report(request, uuid): + query = getNormalizationReportQuery() + cursor = connection.cursor() + cursor.execute(query, ( uuid, uuid, uuid, uuid, uuid, uuid, uuid, uuid )) + objects = dictfetchall(cursor) + + return render(request, 'main/normalization_report.html', locals()) + +def ingest_browse_normalization(request, jobuuid): + jobs = models.Job.objects.filter(jobuuid=jobuuid) + job = jobs[0] + title = 'Review normalization' + name = utils.get_directory_name(job) + directory = '/var/archivematica/sharedDirectory/watchedDirectories/approveNormalization' + + return render(request, 'main/ingest/aip_browse.html', locals()) + +def ingest_browse_aip(request, jobuuid): + """ + jobs = models.Job.objects.filter(jobuuid=jobuuid) + + if jobs.count() == 0: + raise Http404 + + job = jobs[0] + sipuuid = job.sipuuid + + sips = 
models.SIP.objects.filter(uuid=sipuuid) + sip = sips[0] + + aipdirectory = sip.currentpath.replace( + '%sharedPath%', + '/var/archivematica/sharedDirectory/' + ) + """ + jobs = models.Job.objects.filter(jobuuid=jobuuid) + job = jobs[0] + title = 'Review AIP' + name = utils.get_directory_name(job) + directory = '/var/archivematica/sharedDirectory/watchedDirectories/storeAIP' + + return render(request, 'main/ingest/aip_browse.html', locals()) + +""" @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ + Transfer + @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ """ + +def transfer_grid(request): + if models.SourceDirectory.objects.count() > 0: + source_directories = models.SourceDirectory.objects.all() + + polling_interval = django_settings.POLLING_INTERVAL + microservices_help = django_settings.MICROSERVICES_HELP + return render(request, 'main/transfer/grid.html', locals()) + +# get rid of this? +def transfer_select(request, source_directory_id): + source_directory = models.SourceDirectory.objects.get(pk=source_directory_id) + # TODO: check that path exists + directory = source_directory.path + return render(request, 'main/transfer/select_directory.html', locals()) + +def transfer_browser(request): + originals_directory = '/var/archivematica/sharedDirectory/transferBackups/originals' + arrange_directory = '/var/archivematica/sharedDirectory/transferBackups/arrange' + if not os.path.exists(originals_directory): + os.mkdir(directory) + if not os.path.exists(arrange_directory): + os.mkdir(arrange_directory) + return render(request, 'main/transfer/browser.html', locals()) + +def transfer_status(request, uuid=None): + # Equivalent to: "SELECT SIPUUID, MAX(createdTime) AS latest FROM Jobs GROUP BY SIPUUID + objects = models.Job.objects.filter(hidden=False, subjobof='', unittype__exact='unitTransfer').values('sipuuid').annotate(timestamp=Max('createdtime')).exclude(sipuuid__icontains = 'None') + mcp_available = False + try: + client = MCPClient() + mcp_status = etree.XML(client.list()) + mcp_available 
= True + except Exception: pass + def encoder(obj): + items = [] + for item in obj: + # Check if hidden (TODO: this method is slow) + if models.Transfer.objects.is_hidden(item['sipuuid']): + continue + jobs = get_jobs_by_sipuuid(item['sipuuid']) + item['directory'] = os.path.basename(utils.get_directory_name(jobs[0])) + item['timestamp'] = calendar.timegm(item['timestamp'].timetuple()) + item['uuid'] = item['sipuuid'] + item['id'] = item['sipuuid'] + del item['sipuuid'] + item['jobs'] = [] + for job in jobs: + newJob = {} + item['jobs'].append(newJob) + newJob['uuid'] = job.jobuuid + newJob['type'] = job.jobtype #map_known_values(job.jobtype) + newJob['microservicegroup'] = job.microservicegroup + newJob['subjobof'] = job.subjobof + newJob['currentstep'] = job.currentstep #map_known_values(job.currentstep) + newJob['timestamp'] = '%d.%s' % (calendar.timegm(job.createdtime.timetuple()), str(job.createdtimedec).split('.')[-1]) + try: mcp_status + except NameError: pass + else: + xml_unit = mcp_status.xpath('choicesAvailableForUnit[UUID="%s"]' % job.jobuuid) + if xml_unit: + xml_unit_choices = xml_unit[0].findall('choices/choice') + choices = {} + for choice in xml_unit_choices: + choices[choice.find("chainAvailable").text] = choice.find("description").text + newJob['choices'] = choices + items.append(item) + return items + response = {} + response['objects'] = objects + response['mcp'] = mcp_available + return HttpResponse(simplejson.JSONEncoder(default=encoder).encode(response), mimetype='application/json') + +def transfer_detail(request, uuid): + jobs = models.Job.objects.filter(sipuuid=uuid) + name = utils.get_directory_name(jobs[0]) + is_waiting = jobs.filter(currentstep='Awaiting decision').count() > 0 + return render(request, 'main/transfer/detail.html', locals()) + +def transfer_microservices(request, uuid): + jobs = models.Job.objects.filter(sipuuid=uuid) + name = utils.get_directory_name(jobs[0]) + return render(request, 'main/transfer/microservices.html', 
locals()) + +def transfer_rights_list(request, uuid): + return rights_list(request, uuid, 'transfer') + +def transfer_rights_edit(request, uuid, id=None): + return rights_edit(request, uuid, id, 'transfer') + +def transfer_rights_delete(request, uuid, id): + return rights_delete(request, uuid, id, 'transfer') + +def transfer_rights_grants_edit(request, uuid, id): + return rights_grants_edit(request, uuid, id, 'transfer') + +def transfer_delete(request, uuid): + try: + transfer = models.Transfer.objects.get(uuid__exact=uuid) + transfer.hidden = True + transfer.save() + response = simplejson.JSONEncoder().encode({'removed': True}) + return HttpResponse(response, mimetype='application/json') + except: + raise Http404 + +""" @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ + Access + @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ """ + +def access_list(request): + access = models.Access.objects.all() + return render(request, 'main/access.html', locals()) + +def access_delete(request, id): + access = get_object_or_404(models.Access, pk=id) + access.delete() + return HttpResponseRedirect(reverse('main.views.access_list')) + +""" @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ + Administration + @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ """ + +def administration(request): + return HttpResponseRedirect(reverse('main.views.administration_dip')) + +def administration_search(request): + message = request.GET.get('message', '') + aip_files_indexed = archival_storage_indexed_count('aips') + return render(request, 'main/administration/search.html', locals()) + +def administration_search_flush_aips_context(request): + prompt = 'Flush AIP search index?' 
+ cancel_url = reverse("main.views.administration_search") + return RequestContext(request, {'action': 'Flush', 'prompt': prompt, 'cancel_url': cancel_url}) + +@decorators.confirm_required('simple_confirm.html', administration_search_flush_aips_context) +@user_passes_test(lambda u: u.is_superuser, login_url='/forbidden/') +def administration_search_flush_aips(request): + conn = pyes.ES('127.0.0.1:9200') + index = 'aips' + + try: + conn.delete_index(index) + message = 'AIP search index flushed.' + try: + conn.create_index(index) + except pyes.exceptions.IndexAlreadyExistsException: + message = 'Error recreating AIP search index.' + + except: + message = 'Error flushing AIP search index.' + pass + + params = urllib.urlencode({'message': message}) + return HttpResponseRedirect(reverse("main.views.administration_search") + "?%s" % params) + +def administration_dip(request): + upload_setting = models.StandardTaskConfig.objects.get(execute="upload-qubit_v0.0") + return render(request, 'main/administration/dip.html', locals()) + +def administration_dip_edit(request, id): + if request.method == 'POST': + upload_setting = models.StandardTaskConfig.objects.get(pk=id) + form = forms.AdministrationForm(request.POST) + if form.is_valid(): + upload_setting.arguments = form.cleaned_data['arguments'] + upload_setting.save() + + return HttpResponseRedirect(reverse("main.views.administration_dip")) + +def administration_atom_dips(request): + link_id = administration_atom_dip_destination_select_link_id() + ReplaceDirChoices = models.MicroServiceChoiceReplacementDic.objects.filter(choiceavailableatlink=link_id) + + ReplaceDirChoiceFormSet = administration_dips_formset() + + valid_submission, formset = administration_dips_handle_updates(request, link_id, ReplaceDirChoiceFormSet) + + if request.method != 'POST' or valid_submission: + formset = ReplaceDirChoiceFormSet(queryset=ReplaceDirChoices) + + return render(request, 'main/administration/dips_edit.html', locals()) + +def 
administration_contentdm_dips(request): + link_id = administration_contentdm_dip_destination_select_link_id() + ReplaceDirChoices = models.MicroServiceChoiceReplacementDic.objects.filter(choiceavailableatlink=link_id) + + ReplaceDirChoiceFormSet = administration_dips_formset() + + valid_submission, formset = administration_dips_handle_updates(request, link_id, ReplaceDirChoiceFormSet) + + if request.method != 'POST' or valid_submission: + formset = ReplaceDirChoiceFormSet(queryset=ReplaceDirChoices) + + return render(request, 'main/administration/dips_contentdm_edit.html', locals()) + +def administration_atom_dip_destination_select_link_id(): + taskconfigs = models.TaskConfig.objects.filter(description='Select DIP upload destination') + taskconfig = taskconfigs[0] + links = models.MicroServiceChainLink.objects.filter(currenttask=taskconfig.id) + link = links[0] + return link.id + +def administration_contentdm_dip_destination_select_link_id(): + taskconfigs = models.TaskConfig.objects.filter(description='Select target CONTENTdm server') + taskconfig = taskconfigs[0] + links = models.MicroServiceChainLink.objects.filter(currenttask=taskconfig.id) + link = links[0] + return link.id + +def administration_dips_formset(): + return modelformset_factory( + models.MicroServiceChoiceReplacementDic, + form=forms.MicroServiceChoiceReplacementDicForm, + extra=1, + can_delete=True + ) + +def administration_dips_handle_updates(request, link_id, ReplaceDirChoiceFormSet): + valid_submission = True + formset = None + + if request.method == 'POST': + formset = ReplaceDirChoiceFormSet(request.POST) + + # take note of formset validity because if submission was successful + # we reload it to reflect + # deletions, etc. 
+ valid_submission = formset.is_valid() + + if valid_submission: + # save/delete partial data (without association with specific link) + instances = formset.save() + + # restore link association + for instance in instances: + instance.choiceavailableatlink = link_id + instance.save() + return valid_submission, formset + +def administration_sources(request): + return render(request, 'main/administration/sources.html', locals()) + +def administration_sources_json(request): + message = '' + if request.method == 'POST': + path = request.POST.get('path', '') + if path != '': + try: + models.SourceDirectory.objects.get(path=path) + except models.SourceDirectory.DoesNotExist: + # save dir + source_dir = models.SourceDirectory() + source_dir.path = path + source_dir.save() + message = 'Directory added.' + else: + message = 'Directory already added.' + else: + message = 'Path is empty.' + + response = {} + response['message'] = message + response['directories'] = [] + + for directory in models.SourceDirectory.objects.all(): + response['directories'].append({ + 'id': directory.id, + 'path': directory.path + }) + return HttpResponse(simplejson.JSONEncoder().encode(response), mimetype='application/json') + +def administration_sources_delete_json(request, id): + models.SourceDirectory.objects.get(pk=id).delete() + response = {} + response['message'] = 'Deleted.' 
+ return HttpResponse(simplejson.JSONEncoder().encode(response), mimetype='application/json') + #return HttpResponseRedirect(reverse('main.views.administration_sources')) + +def administration_processing(request): + file_path = '/var/archivematica/sharedDirectory/sharedMicroServiceTasksConfigs/processingMCPConfigs/defaultProcessingMCP.xml' + + if request.method == 'POST': + xml = request.POST.get('xml', '') + file = open(file_path, 'w') + file.write(xml) + else: + file = open(file_path, 'r') + xml = file.read() + + return render(request, 'main/administration/processing.html', locals()) + +""" @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ + Misc + @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ """ + +def forbidden(request): + return render(request, 'forbidden.html') + +def task_duration_in_seconds(task): + duration = int(format(task.endtime, 'U')) - int(format(task.starttime, 'U')) + if duration == 0: + duration = '< 1' + return duration + +def task(request, uuid): + task = models.Task.objects.get(taskuuid=uuid) + task.duration = task_duration_in_seconds(task) + objects = [task] + return render(request, 'main/tasks.html', locals()) + +def tasks(request, uuid): + job = models.Job.objects.get(jobuuid=uuid) + objects = job.task_set.all().order_by('-exitcode', '-endtime', '-starttime', '-createdtime') + + # figure out duration in seconds + for object in objects: + object.duration = task_duration_in_seconds(object) + + return render(request, 'main/tasks.html', locals()) + +def map_known_values(value): + #changes should be made in the database, not this map + map = { + # currentStep + 'completedSuccessfully': 'Completed successfully', + 'completedUnsuccessfully': 'Failed', + 'exeCommand': 'Executing command(s)', + 'verificationCommand': 'Executing command(s)', + 'requiresAprroval': 'Requires approval', + 'requiresApproval': 'Requires approval', + # jobType + 'acquireSIP': 'Acquire SIP', + 'addDCToMETS': 'Add DC to METS', + 'appraiseSIP': 'Appraise SIP', + 'assignSIPUUID': 'Asign SIP UUID', + 
'assignUUID': 'Assign file UUIDs and checksums', + 'bagit': 'Bagit', + 'cleanupAIPPostBagit': 'Cleanup AIP post bagit', + 'compileMETS': 'Compile METS', + 'copyMETSToDIP': 'Copy METS to DIP', + 'createAIPChecksum': 'Create AIP checksum', + 'createDIPDirectory': 'Create DIP directory', + 'createOrMoveDC': 'Create or move DC', + 'createSIPBackup': 'Create SIP backup', + 'detoxFileNames': 'Detox filenames', + 'extractPackage': 'Extract package', + 'FITS': 'FITS', + 'normalize': 'Normalize', + 'Normalization Failed': 'Normalization failed', + 'quarantine': 'Place in quarantine', + 'reviewSIP': 'Review SIP', + 'scanForRemovedFilesPostAppraiseSIPForPreservation': 'Scan for removed files post appraise SIP for preservation', + 'scanForRemovedFilesPostAppraiseSIPForSubmission': 'Scan for removed files post appraise SIP for submission', + 'scanWithClamAV': 'Scan with ClamAV', + 'seperateDIP': 'Seperate DIP', + 'storeAIP': 'Store AIP', + 'unquarantine': 'Remove from Quarantine', + 'Upload DIP': 'Upload DIP', + 'verifyChecksum': 'Verify checksum', + 'verifyMetadataDirectoryChecksums': 'Verify metadata directory checksums', + 'verifySIPCompliance': 'Verify SIP compliance', + } + if value in map: + return map[value] + else: + return value + +def get_jobs_by_sipuuid(uuid): + jobs = models.Job.objects.filter(sipuuid=uuid).order_by('-createdtime') + priorities = { + 'completedUnsuccessfully': 0, + 'requiresAprroval': 1, + 'requiresApproval': 1, + 'exeCommand': 2, + 'verificationCommand': 3, + 'completedSuccessfully': 4, + 'cleanupSuccessfulCommand': 5, + } + def get_priority(job): + try: return priorities[job.currentstep] + except Exception: return 0 + return sorted(jobs, key = get_priority) # key = lambda job: priorities[job.currentstep] + +def jobs_list_objects(request, uuid): + response = [] + job = models.Job.objects.get(jobuuid=uuid) + + for root, dirs, files in os.walk(job.directory + '/objects', False): + for name in files: + directory = root.replace(job.directory + 
'/objects', '') + response.append(os.path.join(directory, name)) + + return HttpResponse(simplejson.JSONEncoder().encode(response), mimetype='application/json') + +def jobs_explore(request, uuid): + # Database query + job = models.Job.objects.get(jobuuid=uuid) + # Prepare response object + contents = [] + response = {} + response['contents'] = contents + # Parse request + if 'path' in request.REQUEST and len(request.REQUEST['path']) > 0: + directory = os.path.join(job.directory, request.REQUEST['path']) + response['base'] = request.REQUEST['path'].replace('.', '') + else: + directory = job.directory + response['base'] = '' + # Build directory + directory = os.path.abspath(directory) + # Security check + tmpDirectory = os.path.realpath(directory) + while True: + if tmpDirectory == os.path.realpath(job.directory): + break + elif tmpDirectory == '/': + raise Http404 + else: + tmpDirectory = os.path.dirname(tmpDirectory) + # If it is a file, return the contents + if os.path.isfile(directory): + mime = subprocess.Popen('/usr/bin/file --mime-type ' + directory, shell=True, stdout=subprocess.PIPE).communicate()[0].split(' ')[-1].strip() + response = HttpResponse(mimetype=mime) + response['Content-Disposition'] = 'attachment; filename=%s' % os.path.basename(directory) + with open(directory) as resource: + response.write(resource.read()) + return response + # Cleaning path + parentDir = os.path.dirname(directory) + parentDir = parentDir.replace('%s/' % job.directory, '') + parentDir = parentDir.replace('%s' % job.directory, '') + response['parent'] = parentDir + # Check if it is or not the root dir to add the "Go parent" link + if os.path.realpath(directory) != os.path.realpath(job.directory): + parent = {} + parent['name'] = 'Go to parent directory...' 
+ parent['type'] = 'parent' + contents.append(parent) + # Add contents of the directory + for item in os.listdir(directory): + newItem = {} + newItem['name'] = item + if os.path.isdir(os.path.join(directory, item)): + newItem['type'] = 'dir' + else: + newItem['type'] = 'file' + newItem['size'] = os.path.getsize(os.path.join(directory, item)) + contents.append(newItem) + return HttpResponse(simplejson.JSONEncoder().encode(response), mimetype='application/json') + +def formdata_delete(request, type, parent_id, delete_id): + return formdata(request, type, parent_id, delete_id) + +def formdata(request, type, parent_id, delete_id = None): + model = None + results = None + response = {} + + # define types handled + if (type == 'rightsnote'): + model = models.RightsStatementRightsGrantedNote + parent_model = models.RightsStatementRightsGranted + model_parent_field = 'rightsgranted' + model_value_fields = ['rightsgrantednote'] + + results = model.objects.filter(rightsgranted=parent_id) + + if (type == 'rightsrestriction'): + model = models.RightsStatementRightsGrantedRestriction + parent_model = models.RightsStatementRightsGranted + model_parent_field = 'rightsgranted' + model_value_fields = ['restriction'] + + results = model.objects.filter(rightsgranted=parent_id) + + if (type == 'licensenote'): + model = models.RightsStatementLicenseNote + parent_model = models.RightsStatementLicense + model_parent_field = 'rightsstatementlicense' + model_value_fields = ['licensenote'] + + results = model.objects.filter(rightsstatementlicense=parent_id) + + if (type == 'statutenote'): + model = models.RightsStatementStatuteInformationNote + parent_model = models.RightsStatementStatuteInformation + model_parent_field = 'rightsstatementstatute' + model_value_fields = ['statutenote'] + + results = model.objects.filter(rightsstatementstatute=parent_id) + + if (type == 'copyrightnote'): + model = models.RightsStatementCopyrightNote + parent_model = models.RightsStatementCopyright + 
model_parent_field = 'rightscopyright' + model_value_fields = ['copyrightnote'] + + results = model.objects.filter(rightscopyright=parent_id) + + if (type == 'copyrightdocumentationidentifier'): + model = models.RightsStatementCopyrightDocumentationIdentifier + parent_model = models.RightsStatementCopyright + model_parent_field = 'rightscopyright' + model_value_fields = [ + 'copyrightdocumentationidentifiertype', + 'copyrightdocumentationidentifiervalue', + 'copyrightdocumentationidentifierrole' + ] + + results = model.objects.filter(rightscopyright=parent_id) + + if (type == 'statutedocumentationidentifier'): + model = models.RightsStatementStatuteDocumentationIdentifier + parent_model = models.RightsStatementStatuteInformation + model_parent_field = 'rightsstatementstatute' + model_value_fields = [ + 'statutedocumentationidentifiertype', + 'statutedocumentationidentifiervalue', + 'statutedocumentationidentifierrole' + ] + + results = model.objects.filter(rightsstatementstatute=parent_id) + + if (type == 'licensedocumentationidentifier'): + model = models.RightsStatementLicenseDocumentationIdentifier + parent_model = models.RightsStatementLicense + model_parent_field = 'rightsstatementlicense' + model_value_fields = [ + 'licensedocumentationidentifiertype', + 'licensedocumentationidentifiervalue', + 'licensedocumentationidentifierrole' + ] + + results = model.objects.filter(rightsstatementlicense=parent_id) + + if (type == 'otherrightsdocumentationidentifier'): + model = models.RightsStatementOtherRightsDocumentationIdentifier + parent_model = models.RightsStatementOtherRightsInformation + model_parent_field = 'rightsstatementotherrights' + model_value_fields = [ + 'otherrightsdocumentationidentifiertype', + 'otherrightsdocumentationidentifiervalue', + 'otherrightsdocumentationidentifierrole' + ] + + results = model.objects.filter(rightsstatementotherrights=parent_id) + + if (type == 'otherrightsnote'): + model = models.RightsStatementOtherRightsInformationNote + 
parent_model = models.RightsStatementOtherRightsInformation + model_parent_field = 'rightsstatementotherrights' + model_value_fields = ['otherrightsnote'] + + results = model.objects.filter(rightsstatementotherrights=parent_id) + + # handle creation + if (request.method == 'POST'): + # load or initiate model instance + id = request.POST.get('id', 0) + if id > 0: + instance = model.objects.get(pk=id) + else: + instance = model() + + # set instance parent + parent = parent_model.objects.filter(pk=parent_id) + setattr(instance, model_parent_field, parent[0]) + + # set instance field values using request data + for field in model_value_fields: + value = request.POST.get(field, '') + setattr(instance, field, value) + instance.save() + + if id == 0: + response['new_id'] = instance.pk + + response['message'] = 'Added.' + + # handle deletion + if (request.method == 'DELETE'): + if (delete_id == None): + response['message'] = 'Error: no delete ID supplied.' + else: + model.objects.filter(pk=delete_id).delete() + response['message'] = 'Deleted.' + + # send back revised data + if (results != None): + response['results'] = [] + for result in results: + values = {} + for field in model_value_fields: + values[field] = result.__dict__[field] + response['results'].append({ + 'id': result.pk, + 'values': values + }); + + if (model == None): + response['message'] = 'Incorrect type.' 
+ + return HttpResponse(simplejson.JSONEncoder().encode(response), mimetype='application/json') + +def chain_insert(): + # first choice + standardTaskConfig = models.StandardTaskConfig() + standardTaskConfig.save() + + taskConfig = models.TaskConfig() + taskConfig.tasktype = 5 + taskConfig.tasktypepkreference = standardTaskConfig.id + taskConfig.description = 'Select DIP upload destination' + taskConfig.save() + + link = models.MicroServiceChainLink() + link.microservicegroup = 'Upload DIP' + link.currenttask = taskConfig.id + link.save() + choice_link_id = link.id + + choice = models.MicroServiceChoiceReplacementDic() + choice.choiceavailableatlink = link.id + choice.description = 'Test dict 1' + choice.replacementdic = '{}' + choice.save() + + choice = models.MicroServiceChoiceReplacementDic() + choice.choiceavailableatlink = link.id + choice.description = 'Test dict 2' + choice.replacementdic = '{}' + choice.save() + + # take note of ID of existing chain to points to ICA AtoM DIP upload links + #chains = models.MicroServiceChain.objects.filter(description='Upload DIP to ICA-ATOM') + #chain = chains[0] + #upload_start_link_id = chain.startinglink + #chain.startinglink = choice_link_id + #chain.description = 'Select Upload Destination' + #chain.save() + + + # make new chain to point to ICA AtoM DIP upload links + chain = models.MicroServiceChain() + chain.startinglink = choice_link_id + chain.description = 'Select DIP destination' + chain.save() + + # rewire old choice to point to new chain + choices = models.MicroServiceChainChoice.objects.filter(chainavailable=23) + choice = choices[0] + choice.chainavailable = chain.id + choice.save() + + # add exit code to the choice link that points to the Qubit upload link + code = models.MicroServiceChainLinkExitCode() + code.exitcode = 0 + code.microservicechainlink = choice_link_id + code.nextmicroservicechainlink = 4 + code.exitmessage = 'Completed successfully' + code.save() + + diff --git 
a/src/dashboard/src/main/views_NormalizationReport.py b/src/dashboard/src/main/views_NormalizationReport.py new file mode 100644 index 0000000000..baa24815ff --- /dev/null +++ b/src/dashboard/src/main/views_NormalizationReport.py @@ -0,0 +1,141 @@ +#!/usr/bin/python -OO +# -*- coding: utf-8 -*- +# +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . 
+ +# @package Archivematica +# @subpackage Dashboard +# @author Joseph Perry +# @version svn: $Id$ + +def getNormalizationReportQuery(): + return """ +SELECT + + Tasks.fileUUID AS U, + Tasks.fileName, + + (SELECT IF(Tasks.taskUUID IS NULL, '', Tasks.taskUUID) + FROM Tasks + JOIN Jobs ON Tasks.jobUUID = Jobs.jobUUID + WHERE + Jobs.SIPUUID = %s AND + Jobs.jobType = 'Normalize access' AND + Tasks.fileUUID = U + ) AS 'access_normalization_task_uuid', + + (SELECT IF(Tasks.taskUUID IS NULL, '', Tasks.taskUUID) + FROM Tasks + JOIN Jobs ON Tasks.jobUUID = Jobs.jobUUID + WHERE + Jobs.SIPUUID = %s AND + Jobs.jobType = 'Normalize preservation' AND + Tasks.fileUUID = U + ) AS 'preservation_normalization_task_uuid', + + Tasks.fileUUID IN ( + SELECT Tasks.fileUUID + FROM Tasks + JOIN Jobs ON Tasks.jobUUID = Jobs.jobUUID + WHERE + Jobs.SIPUUID = %s AND + Jobs.jobType = 'Normalize preservation' AND + Jobs.MicroServiceChainLinksPK NOT IN (SELECT MicroserviceChainLink FROM DefaultCommandsForClassifications ) AND + Tasks.stdOut LIKE '%%[Command]%%') + AS 'preservation_normalization_attempted', + + ( + SELECT Tasks.exitCode + FROM Tasks + JOIN Jobs ON Tasks.jobUUID = Jobs.jobUUID + WHERE + Jobs.SIPUUID = %s AND + Jobs.jobType = 'Normalize preservation' AND + Tasks.fileUUID = U + ) != 0 + AS 'preservation_normalization_failed', + + filesPreservationAccessFormatStatus.inPreservationFormat AS 'already_in_preservation_format', + + Tasks.fileUUID NOT IN ( + SELECT Tasks.fileUUID + FROM Tasks + JOIN Jobs ON Tasks.jobUUID = Jobs.jobUUID + WHERE + Jobs.SIPUUID = %s AND + Tasks.exec = 'transcoderNormalizeAccess_v0.0' AND + Tasks.stdOut LIKE '%%description: Copying File.%%') AND + Tasks.fileUUID IN ( + SELECT Tasks.fileUUID + FROM Tasks + JOIN Jobs ON Tasks.jobUUID = Jobs.jobUUID + WHERE + Jobs.SIPUUID = %s AND + Jobs.jobType = 'Normalize access' AND + Tasks.stdOut LIKE '%%[Command]%%' AND + Jobs.MicroServiceChainLinksPK NOT IN (SELECT MicroserviceChainLink FROM 
DefaultCommandsForClassifications ) AND + Tasks.stdOut NOT LIKE '%%Not including %% in DIP.%%' ) + AS 'access_normalization_attempted', + + ( + SELECT Tasks.exitCode + FROM Tasks + JOIN Jobs ON Tasks.jobUUID = Jobs.jobUUID + WHERE + Jobs.SIPUUID = %s AND + Jobs.jobType = 'Normalize access' AND + Tasks.fileUUID = U + ) != 0 + AS 'access_normalization_failed', + + filesPreservationAccessFormatStatus.inAccessFormat AS 'already_in_access_format', + + ( + SELECT Files.originalLocation + FROM Files + WHERE + Files.fileUUID = U + ) + AS 'location', + + Tasks.jobUUID AS 'jobUUID' + + FROM Files + LEFT OUTER JOIN Tasks ON Files.fileUUID = Tasks.fileUUID + LEFT OUTER JOIN Jobs ON Tasks.jobUUID = Jobs.jobUUID + LEFT OUTER JOIN filesPreservationAccessFormatStatus ON filesPreservationAccessFormatStatus.fileUUID = Files.fileUUID + WHERE + Jobs.SIPUUID = %s AND + Files.fileGrpUse != 'preservation' AND + Files.currentLocation LIKE '\%%SIPDirectory\%%objects/%%' + GROUP BY Tasks.fileUUID + ORDER BY Tasks.fileName; +""" + +if __name__ == '__main__': + import sys + uuid = "'%s'" % (sys.argv[1]) + sys.path.append("/usr/lib/archivematica/archivematicaCommon") + import databaseInterface + print "testing normalization report" + sql = getNormalizationReportQuery() + sql = sql % ( uuid, uuid, uuid, uuid, uuid, uuid, uuid, uuid ) + rows = databaseInterface.queryAllSQL(sql) + for row in rows: + print row + print diff --git a/src/dashboard/src/manage.py b/src/dashboard/src/manage.py new file mode 100644 index 0000000000..bcdd55e27b --- /dev/null +++ b/src/dashboard/src/manage.py @@ -0,0 +1,11 @@ +#!/usr/bin/python +from django.core.management import execute_manager +try: + import settings # Assumed to be in the same directory. +except ImportError: + import sys + sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. 
It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__) + sys.exit(1) + +if __name__ == "__main__": + execute_manager(settings) diff --git a/src/dashboard/src/mcp/__init__.py b/src/dashboard/src/mcp/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/dashboard/src/mcp/urls.py b/src/dashboard/src/mcp/urls.py new file mode 100644 index 0000000000..9aef58b242 --- /dev/null +++ b/src/dashboard/src/mcp/urls.py @@ -0,0 +1,6 @@ +from django.conf.urls.defaults import patterns + +urlpatterns = patterns('mcp.views', + (r'execute/$', 'execute'), + (r'list/$', 'list'), +) diff --git a/src/dashboard/src/mcp/views.py b/src/dashboard/src/mcp/views.py new file mode 100644 index 0000000000..fc8a761d71 --- /dev/null +++ b/src/dashboard/src/mcp/views.py @@ -0,0 +1,39 @@ +# This file is part of Archivematica. +# +# Copyright 2010-2012 Artefactual Systems Inc. +# +# Archivematica is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Archivematica is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Archivematica. If not, see . 
+ +from django.http import HttpResponse +from contrib.mcp.client import MCPClient +from lxml import etree + +def execute(request): + result = '' + if 'uuid' in request.REQUEST: + client = MCPClient() + uuid = request.REQUEST.get('uuid', '') + choice = request.REQUEST.get('choice', '') + result = client.execute(uuid, choice) + return HttpResponse(result, mimetype = 'text/plain') + +def list(request): + client = MCPClient() + jobs = etree.XML(client.list()) + response = '' + if 0 < len(jobs): + for job in jobs: + response += etree.tostring(job) + response = '%s' % response + return HttpResponse(response, mimetype = 'text/xml') diff --git a/src/dashboard/src/media/css/archival_storage.css b/src/dashboard/src/media/css/archival_storage.css new file mode 100644 index 0000000000..2c51b006ed --- /dev/null +++ b/src/dashboard/src/media/css/archival_storage.css @@ -0,0 +1,84 @@ +td, th { + border: 1px solid #eee; + text-align: left; +} + +th > a.selected { + color: Black; +} + +thead > tr > th { + background-color: #ddd; +} + +td.link-sip > a { + background: transparent url(/media/images/package_go.png) no-repeat 0% 50%; + padding-left: 22px; + height: 16px; + display: block; +} + +td.uuid, .uuid { + font-family: "Courier New", Courier, "Lucida Console", monospace; +} + +th > div { + position: relative; +} + +th > div > a { + display: block; + text-indent: -10000px; + position: absolute; + width: 16px; + height: 16px; +} + +th > div > a.up { + background: transparent url(/media/images/bullet_arrow_up.png) no-repeat 0% 0%; + top: -8px; + right: -4px; +} + +th > div > a.down { + background: transparent url(/media/images/bullet_arrow_down.png) no-repeat 0% 0%; + top: 6px; + right: -4px; +} + +th > div > a.selected_up { + background-image: url(/media/images/bullet_orange.png); +} + +th > div > a.selected_down { + background-image: url(/media/images/bullet_orange.png); +} + +tbody > tr:last-child > td { + border-bottom: 2px solid #ddd; +} + +tfoot > tr > td { + border: 0px; +} + 
+td.size { + text-align: right; +} + +/* AIP search */ +#aip-search-summary { + margin-bottom: 1em; + padding: .5em; + background-color: whitesmoke; +} + +.aip-search-item-details { + margin-top: 1em; + margin-bottom: 1em; + margin-left: 2em; +} + +.aip-search-item-detail-field { + margin-right: 1em; +} diff --git a/src/dashboard/src/media/css/backbone-file-explorer.css b/src/dashboard/src/media/css/backbone-file-explorer.css new file mode 100644 index 0000000000..a00fc4209e --- /dev/null +++ b/src/dashboard/src/media/css/backbone-file-explorer.css @@ -0,0 +1,50 @@ +.backbone-file-explorer { + width: 100%; + background-repeat: no-repeat; + background-position: right top; +} + +.backbone-file-explorer-busy { + background-color: '#eee'; + opacity: .5; + background-image: url('../img/ajax-loader.gif'); + background-position: 99% 50%; +} + +.backbone-file-explorer-idle { + background-color: 'white'; + opacity: 1; + background-image: none; +} + +.backbone-file-explorer-entry { + position: relative; + padding: 6px; +} + +.backbone-file-explorer-entry-odd { + background-color: #f5f5f5; +} + +.backbone-file-explorer-directory { + background-image: url('../img/folder.png'); + background-repeat: no-repeat; + background-position: 3px 50%; +} + +.backbone-file-explorer-directory_open { + background-image: url('../img/folderopen.png'); +} + +.backbone-file-explorer-directory_icon_button { + display: block; + float: left; + width: 28px; + height: 16px; +} + +.backbone-file-explorer-directory_entry_actions { + float: right; +} + +.backbone-file-explorer-level { padding-left: 10px; } diff --git a/src/dashboard/src/media/css/base.css b/src/dashboard/src/media/css/base.css new file mode 100644 index 0000000000..fd0b8521e6 --- /dev/null +++ b/src/dashboard/src/media/css/base.css @@ -0,0 +1,8 @@ +/* +Copyright (c) 2010, Yahoo! Inc. All rights reserved. 
+Code licensed under the BSD License: +http://developer.yahoo.com/yui/license.html +version: 3.2.0 +build: 2676 +*/ +h1{font-size:138.5%;}h2{font-size:123.1%;}h3{font-size:108%;}h1,h2,h3{margin:1em 0;}h1,h2,h3,h4,h5,h6,strong{font-weight:bold;}abbr,acronym{border-bottom:1px dotted #000;cursor:help;}em{font-style:italic;}blockquote,ul,ol,dl{margin:1em;}ol,ul,dl{margin-left:2em;}ol li{list-style:decimal outside;}ul li{list-style:disc outside;}dl dd{margin-left:1em;}th,td{border:1px solid #000;padding:.5em;}th{font-weight:bold;text-align:center;}caption{margin-bottom:.5em;text-align:center;}p,fieldset,table,pre{margin-bottom:1em;}input[type=text],input[type=password],textarea{width:12.25em;*width:11.9em;} \ No newline at end of file diff --git a/src/dashboard/src/media/css/directory_picker.css b/src/dashboard/src/media/css/directory_picker.css new file mode 100644 index 0000000000..b98dbfe7af --- /dev/null +++ b/src/dashboard/src/media/css/directory_picker.css @@ -0,0 +1,26 @@ +.backbone-file-explorer { + width: 100%; + -webkit-box-sizing: border-box; + -moz-box-sizing: border-box; + box-sizing: border-box; +} + +.backbone-file-explorer-busy { + background-image: url('../images/ajax-loader.gif'); +} + +.backbone-file-explorer-directory { + background-image: url('../images/filefolder.png'); +} + +.backbone-file-explorer-directory_open { + background-image: url('../images/filefolderopen.png'); +} + +.backbone-file-explorer-busy { + background-image: url('../images/ajax-loader.gif'); +} + +.backbone-file-explorer-idle { + background-image: none; +} diff --git a/src/dashboard/src/media/css/fonts.css b/src/dashboard/src/media/css/fonts.css new file mode 100644 index 0000000000..b561aa9622 --- /dev/null +++ b/src/dashboard/src/media/css/fonts.css @@ -0,0 +1,8 @@ +/* +Copyright (c) 2010, Yahoo! Inc. All rights reserved. 
+Code licensed under the BSD License: +http://developer.yahoo.com/yui/license.html +version: 3.2.0 +build: 2676 +*/ +body{font:13px/1.231 arial,helvetica,clean,sans-serif;*font-size:small;*font:x-small;}select,input,button,textarea{font:99% arial,helvetica,clean,sans-serif;}table{font-size:inherit;font:100%;}pre,code,kbd,samp,tt{font-family:monospace;*font-size:108%;line-height:100%;} \ No newline at end of file diff --git a/src/dashboard/src/media/css/reset.css b/src/dashboard/src/media/css/reset.css new file mode 100644 index 0000000000..9633386be2 --- /dev/null +++ b/src/dashboard/src/media/css/reset.css @@ -0,0 +1,8 @@ +/* +Copyright (c) 2010, Yahoo! Inc. All rights reserved. +Code licensed under the BSD License: +http://developer.yahoo.com/yui/license.html +version: 3.2.0 +build: 2676 +*/ +html{color:#000;background:#FFF;}body,div,dl,dt,dd,ul,ol,li,h1,h2,h3,h4,h5,h6,pre,code,form,fieldset,legend,input,textarea,p,blockquote,th,td{margin:0;padding:0;}table{border-collapse:collapse;border-spacing:0;}fieldset,img{border:0;}address,caption,cite,code,dfn,em,strong,th,var{font-style:normal;font-weight:normal;}li{list-style:none;}caption,th{text-align:left;}h1,h2,h3,h4,h5,h6{font-size:100%;font-weight:normal;}q:before,q:after{content:'';}abbr,acronym{border:0;font-variant:normal;}sup{vertical-align:text-top;}sub{vertical-align:text-bottom;}input,textarea,select{font-family:inherit;font-size:inherit;font-weight:inherit;}input,textarea,select{*font-size:100%;}legend{color:#000;} \ No newline at end of file diff --git a/src/dashboard/src/media/css/style.css b/src/dashboard/src/media/css/style.css new file mode 100644 index 0000000000..e5f04ce8e5 --- /dev/null +++ b/src/dashboard/src/media/css/style.css @@ -0,0 +1,699 @@ +body { + padding-top: 60px !important; +} + +.topbar .brand { + background: transparent url(../../media/images/logo_header.png) no-repeat 50% 50%; + width: 229px; + text-indent: -99999em; +} + +.nav li { + position: relative; +} + +.nav li span { + 
position: absolute; + float: right; + top: 4px; + right: -8px; + -webkit-border-radius: 999px; + -moz-border-radius: 999px; + border-radius: 999px; + background-color: Red; + color: White; + font-weight: bold; + font-size: 9px; + width: 2em; + height: 16px; + text-align: center; + z-index: 10; +} + +li.active { + background-color: #eee; +} + +li.user > a { + cursor: pointer; + border-left: 2px solid #444; +} + +/* Override Bootstrap default */ +.container-fluid > .content { + margin-left: 0px; +} + +/* Tooltips */ + +.tooltip { + position: absolute; + border: 1px solid #333; + background: #f7f5d1; + color: #333; + border-radius: 4px; + -moz-border-radius: 4px; + -webkit-border-radius: 4px; + box-shadow: 4px 4px 4px #999999; + -webkit-box-shadow: 4px 4px 4px #999999; + -moz-box-shadow: 4px 4px 4px #999999; + filter: progid:DXImageTransform.Microsoft.dropShadow(color=#999999, offX=4, offY=4, positive=true); +} + +.tooltip > .tooltip-title { + margin: 0; + white-space: nowrap; + padding: 2px 4px; + background-color: #888; + font-weight: bold; + color: White; +} + +.tooltip > .tooltip-content { + padding: 2px 5px 2px 5px; + margin: 0 0 0 0; +} + +/* Status widget */ + +#connection-status { + +} + +#connection-status > #status-bullet { + position: absolute; + top: 0; + right: 0; + margin-right: 1em; + height: 40px; + line-height: 40px; +} + +#connection-status > #status-bullet > img { + width: 16px; + height: 16px; + margin-bottom: -3px; +} + +#connection-status > #status-bullet > span { + color: #bfbfbf; +} + +/* SIPs */ + +#sip-container { + min-width: 950px; +} + +#sip-header { + font-weight: bold; + padding: 3px 2px; + height: 19px; + margin-bottom: 7px; + background: url(/media/images/dots.gif) repeat-x bottom; +} + +#sip-header > div { float: left; padding: 3px 2px 3px 2px; } +#sip-header-directory { width: 319px; } +#sip-header-uuid { width: 304px; } +#sip-header-timestamp { width: 120px; } +#sip-header-actions { } + +.sip { + width: 100%; + clear: both; + float: 
left; + border: 1px solid White; +} + +.sip:hover, .sip-selected { + border-color: #bbb; + background-color: #eee; +} + +.sip-new { + background-color: #fedda7; +} + +.sip-removing { + background-color: #f7cdcd; + border-color: #bbb; +} + +.sip-row { cursor: pointer; } +.sip-row > div { float: left; } +.sip-detail-icon-status { width: 26px; padding: 4px 0px; text-align: center; } +.sip-detail-directory {float: left; width: 300px; padding: 4px 0px; display: table-cell; white-space: nowrap; } +.sip-detail-uuid {float: left; width: 310px; padding: 4px 0px; font-family: "Courier New", Courier, "Lucida Console", monospace; } +.sip-detail-timestamp {float: left; width: 180px; padding: 4px 0px; } +.sip-detail-actions {float: left; } + +/* Clearfix in .sip-row */ +.sip-row { *zoom: 1; } +.sip-row:after { clear: both; content: ""; display: table; } + +.sip-detail-directory > abbr { + display: none; + border: 1px dotted #999; + padding: 1px 4px; + margin-left: 8px; +} + +.sip-detail-directory > abbr:hover { + background-color: #fff; +} + +.sip-detail-actions > a { + visibility: hidden; + float: left; + height: 26px; + width: 16px; + margin-right: 4px; + background-color: transparent; + background-repeat: no-repeat; + background-position: center left; +} + +.sip-detail-actions > a > span { + display: none; +} + +.btn_show_metadata { background-image: url(/media/images/zoom.png); } +.sip-selected .btn_show_metadata { background-image: url(/media/images/table_edit.png); } +.btn_remove_sip { background-image: url(/media/images/delete.png); } +.btn_edit_metadata { background-image: url(/media/images/table_edit.png); } + +.sip-removing .sip-detail-actions > a, +.sip:hover .sip-detail-actions > a, +.sip-selected .sip-detail-actions > a { + visibility: visible; +} + +.sip-detail-job-container { + clear: both; + display: none; +} + +.microservice-group { + cursor: pointer; + border-top: 1px solid #ddd; +} + +.job { + border-top: 1px solid #999; + background-color: White; + clear: 
both; + float: left; + width: 100%; +} + +.job > div { + padding: 2px 5px; + float: left; + line-height: 18px; +} + +.job-detail-microservice { + width: 620px; + border-right: 1px dotted #bbb; +} + +.job-detail-microservice > .job-type-label { + margin-left: 20px; +} + +.job-detail-currentstep { width: 170px; border-right: 1px dotted #bbb; } +.job-detail-actions { } + +.job-detail-actions > a { + display: block; + float: left; + width: 16px; + height: 16px; + background-color: transparent; + background-repeat: no-repeat; + background-position: center left; +} + +.job-detail-actions > a > span { + display: none; +} + +.job-detail-actions > select { + width: 80px; + height: auto; + line-height: normal; + padding: 0; + font-size: 13px; + margin-left: 8px; + border: 1px solid #999; +} + +.job-detail-actions > a { margin-left: 4px; } +.job-detail-actions > .btn_show_tasks { background-image: url(/media/images/cog.png); } +.job-detail-actions > .btn_browse_job { background-image: url(/media/images/folder_magnify.png); } +.job-detail-actions > .btn_approve_job { background-image: url(/media/images/control_play_blue.png); color: Green; } +.job-detail-actions > .btn_reject_job { background-image: url(/media/images/control_stop_blue.png); color: Red; } +.job-detail-actions > .btn_normalization_report { background-image: url(/media/images/table_edit.png); } + +.job-detail-microservice > .title { + -webkit-transform: rotate(-90deg); + -moz-transform: rotate(-90deg); + filter: progid:DXImageTransform.Microsoft.BasicImage(rotation=3); + background-color: Green; + left: 0px; + top: 0px; + text-align: left; +} + +.job-detail-currentstep > div { + margin: 4px 0; +} + +.task-dialog { + font-size: 11px; +} + +.task-dialog > table { + /* width: 640px; */ + width: 100%; +} + +.task-dialog a { + color: Blue; + text-decoration: none; +} + +.task-dialog .stdout .stderror { + background-color: #ddd; + font-family: "Courier New"; +} + +.stderror > pre { + background-color: #f2d8d8; +} + 
+#polling-notification { + position: absolute; + right: 32px; + top: 54px; + padding: 2px 4px 2px 22px; + font-size: 12px; + color: Red; + background: transparent url(/media/images/clock.png) no-repeat center left; +} + +#directory-browser { + display: none; +} + +#directory-browser-tab { + position: absolute; + top: 74px; + right: 40px; + background-color: #42769e; + font-size: 12px; + font-weight: bold; + color: #ffffff; + padding: 4px 12px 12px 12px; + line-height: 22px; + text-shadow: #000000 1px 1px 1px; + border-left: 2px solid White; + border-right: 2px solid White; +} + +#directory-browser-tab > a { + color: White; +} + +#directory-browser-content { + position: absolute; + width: 400px; + height: 200px; + background-color: #ddd; + top: 108px; + border: 2px solid #42769e; + right: 12px; + box-shadow: 4px 4px 4px #999999; + -webkit-box-shadow: 4px 4px 4px #999999; + -moz-box-shadow: 4px 4px 4px #999999; + filter: progid:DXImageTransform.Microsoft.dropShadow(color=#999999, offX=4, offY=4, positive=true); + border-radius: 4px 4px 4px 4px; + -moz-border-radius: 4px 4px 4px 4px; + -webkit-border-radius: 4px 4px 4px 4px; +} + +#directory-browser-content > ul { + list-style: none; + margin: 0 !important; + padding: 0 !important; + max-height: 320px; + overflow-x: auto; +} + +#directory-browser-content > ul > li { + margin: 0 !important; + padding: 0 !important; + list-style: none; + background: transparent url(/media/vendor/jquery-ui/images-others/ui-bg_gloss-wave_75_2191c0_500x100.png) no-repeat center left; + font-size: 13px; + font-weight: bold; + text-shadow: #444 1px 1px 1px; +} + +#directory-browser-content > ul > li > a { + display: block; + color: White; + padding: 6px 8px 6px 30px !important; + background-color: transparant; + background-repeat: no-repeat; + background-position: 8px center; + border-bottom: 1px solid #333; + text-decoration: none; +} + +#directory-browser-content > ul > li:hover > a { + background-color: #333; + background-color: rgba(0, 
0, 0, 0.15); +} + +#directory-browser-content > ul > .dir > a { + background-image: url(/media/images/folder.png); +} + +#directory-browser-content > ul > .file > a { + background-image: url(/media/images/page_white.png); +} + +#directory-browser-content > ul > .parent > a { + background-image: url(/media/images/control_rewind.png); + background-color: rgba(0, 0, 0, 0.15); +} + +/* + * Preservation planning report + */ + +#page-preservation-planning table { } +#page-preservation-planning table ul { list-style: none; } +#page-preservation-planning table ul li { display: inline; } + +/* + * Optimizations based in screen width + */ + +.w-lte-1020 .sip-detail-uuid, +.w-lte-1020 #sip-header-uuid { + display: none; +} + +.w-lte-1020 .sip-detail-directory > abbr { + display: inline; +} + +.w-lte-1020 #sip-header-directory { + width: 448px; +} + +.w-lte-1020 .sip-detail-directory { + width: 430px; +} + +.w-lte-1020 .job-detail-microservice { + width: 440px; +} + +.w-lte-1020 .job-detail-actions > a > span, +.w-lte-1020 .sip-detail-actions > a > span, +.w-lte-1200 .job-detail-actions > a > span, +.w-lte-1200 .sip-detail-actions > a > span { + display: none; +} + +.w-lte-1020 .job-detail-microservice > span:first-child { + display: none; +} + +/* + * Normalization report + */ + +td.error { + background-color: #f2d8d8; +} + +/* + * Forms + */ + +textarea { + resize: vertical; +} + +/* + * Reset jQuery UI dialog + */ + +.ui-dialog { + box-shadow: 0px 0px 8px #333; + -webkit-box-shadow: 0px 0px 8px #333; + -moz-box-shadow: 0px 0px 8px #333; + filter: progid:DXImageTransform.Microsoft.dropShadow(color=#333, offX=4, offY=4, positive=true); + padding: 0; + -moz-border-radius: 0px; + -webkit-border-radius: 0px; + -khtml-border-radius: 0px; + border-radius: 0px; +} + +.ui-dialog .ui-widget-header { + background: none; + border: 0; + border-bottom: 1px solid #e5e5e5; +} + +.ui-dialog .ui-dialog-title { + line-height: 1.2em; +} + +.ui-dialog .ui-dialog-titlebar { + -moz-border-radius: 
0px; + -webkit-border-radius: 0px; + -khtml-border-radius: 0px; + border-radius: 0px; + background-color: #eee; + padding: 10px 18px; +} + +.ui-dialog .ui-dialog-content { + padding: 1em; +} + +.ui-dialog .ui-dialog-buttonpane { + border-color: #e5e5e5; +} + +/* + * + */ + +input.command, +textarea.command { + font-family: "Courier New", Courier, "Lucida Console", monospace; + font-size: 14px; + line-height: 1.5em; +} + +/* + * Preview help text (non-js friendly) + */ + +.preview-help-text { + +} + +.preview-help-text .preview { + display: none; +} + +.preview-help-text .content { + +} + +/* Global file browser tweaks */ +.backbone-file-explorer { + border: 1px solid #eee; + padding: 5px; +} + +.modal-body > .backbone-file-explorer { + height: 300px; + overflow: scroll; +} + +/* + * Welcome + */ + +#page-welcome form input { + + -webkit-box-sizing: border-box; + -moz-box-sizing: border-box; + box-sizing: border-box; + width: 100%; + height: 28px; + font-size: 18px; + line-height: 1; + +} + +/* + * Login + */ + +#login { + + width: 320px; + margin: auto; + padding-top: 120px; + +} + +#login * { + + -webkit-box-sizing: border-box; + -moz-box-sizing: border-box; + box-sizing: border-box; + +} + +#login h1 { + + background-image: url('../images/logo_login.gif'); + background-size: 239px 28px; + background-position: top center; + background-repeat: no-repeat; + text-indent: -9999px; + overflow: hidden; + +} + +#login form { + + padding: 26px 0 0 0; + background: #eee; + border: 1px solid #c5c5c5; + -moz-box-shadow: rgba(200,200,200,0.7) 0 4px 10px -1px; + -webkit-box-shadow: rgba(200, 200, 200, 0.7) 0 4px 10px -1px; + box-shadow: rgba(200, 200, 200, 0.7) 0 4px 10px -1px; + -webkit-border-radius: 4px; + -moz-border-radius: 4px; + border-radius: 4px; + +} + +#login input { + + width: 100%; + height: 40px; + font-size: 24px; + line-height: 1; + +} + +#login .fields { + padding: 0 20px 20px 20px; +} + +#login .actions { + + text-align: right; + margin: 0; + padding: 10px 
20px; + background-color: #fafafa; + border-top: 1px solid #bbb; + -mox-box-shadow: inset 0px 3px 14px -6px rgba(0, 0, 0 ,0.6); + -webkit-box-shadow: inset 0px 3px 14px -6px rgba(0, 0, 0, 0.6); + box-shadow: inset 0px 3px 14px -6px rgba(0, 0, 0, 0.6); + +} + +#forgot-password { + float: left; + line-height: 28px; +} + +/* Sidebar */ + +.sidebar ul, +.sidebar li { + list-style: none; + margin: 0; + padding: 0; +} + +.sidebar li { + padding: 4px 6px; +} + +.sidebar .active { + border: 1px solid #666; + box-shadow: 1px 1px 1px rgba(0, 0, 0, .5); + -webkit-box-shadow: 1px 1px 1px rgba(0, 0, 0, .5); + -moz-box-shadow: 0px 1px 1px rgba(0, 0, 0, .5); + -webkit-border-radius: 0px 4px 4px 0; + -moz-border-radius: 0px 4px 4px 0; + border-radius: 0px 4px 4px 0; +} + +.sidebar .active a { + color: Black; + text-decoration: none; +} + +/* Rights administration */ + +.rights-basis-fieldset { + border-bottom: 1px solid #eee; + margin-bottom: 1em; +} + +.statute-fieldset, .non-repeating-formset { + margin-top: 2em; +} + +.statute-fieldset:nth-child(odd), .grant-fieldset:nth-child(odd) { + background: #ffe; +} + +.repeating-ajax-data-row { + margin-bottom: 1em; + padding-left: .5em; + padding-bottom: .5em; +} + +.repeating-ajax-data-row textarea { + width: 550px; +} + +.repeating-data-multifield { + background-color: #eee; +} + +.repeating-ajax-data-row > div > input { + width: 625px; +} + +.repeating-ajax-data-row > textarea, .repeating-ajax-data-field > textarea { + width: 625px; +} + +.rights-grant-restrictions > div > .repeating-ajax-data-row { + padding-left: 0em; + padding-bottom: 0em; +} + +.rights-grant-restrictions > .repeating-ajax-data-row { + padding-left: 0em; + padding-bottom: 0em; +} diff --git a/src/dashboard/src/media/css/transfer_grid.css b/src/dashboard/src/media/css/transfer_grid.css new file mode 100644 index 0000000000..48960dcda6 --- /dev/null +++ b/src/dashboard/src/media/css/transfer_grid.css @@ -0,0 +1,48 @@ +#transfer_component_form { + border-bottom: 1px 
dotted #ddd; + margin-bottom: 5px; + margin-top: 10px; + margin-bottom: 20px; +} + +#transfer-type { + width: 100px; +} + +#transfer-type-container { + float: left; + margin-right: 10px; +} + +.transfer-component-activity-indicator { + float: right; + display: none; +} + +#transfer-name-container { + float: left; + margin-right: 10px; +} + +#transfer-accession-number { + width: 75px; + margin-right: 10px; +} + +#tranfer_paths_area { + float:left; + width: 450px; +} + +#path_add_button { + margin-right: 10px; +} + +#path_source_select { + margin-right: 10px; +} + +#path_container { + margin-top: 20px; + margin-bottom: 10px; +} diff --git a/src/dashboard/src/media/images/accept.png b/src/dashboard/src/media/images/accept.png new file mode 100644 index 0000000000..89c8129a49 Binary files /dev/null and b/src/dashboard/src/media/images/accept.png differ diff --git a/src/dashboard/src/media/images/ajax-loader.gif b/src/dashboard/src/media/images/ajax-loader.gif new file mode 100644 index 0000000000..f2a1bc0c6f Binary files /dev/null and b/src/dashboard/src/media/images/ajax-loader.gif differ diff --git a/src/dashboard/src/media/images/arrow_refresh.png b/src/dashboard/src/media/images/arrow_refresh.png new file mode 100644 index 0000000000..0de26566d4 Binary files /dev/null and b/src/dashboard/src/media/images/arrow_refresh.png differ diff --git a/src/dashboard/src/media/images/attach.png b/src/dashboard/src/media/images/attach.png new file mode 100644 index 0000000000..ea897cc9f1 Binary files /dev/null and b/src/dashboard/src/media/images/attach.png differ diff --git a/src/dashboard/src/media/images/bell.png b/src/dashboard/src/media/images/bell.png new file mode 100644 index 0000000000..6e0015df4f Binary files /dev/null and b/src/dashboard/src/media/images/bell.png differ diff --git a/src/dashboard/src/media/images/bullet_arrow_down.png b/src/dashboard/src/media/images/bullet_arrow_down.png new file mode 100644 index 0000000000..9b23c06d7b Binary files /dev/null and 
b/src/dashboard/src/media/images/bullet_arrow_down.png differ diff --git a/src/dashboard/src/media/images/bullet_arrow_up.png b/src/dashboard/src/media/images/bullet_arrow_up.png new file mode 100644 index 0000000000..24df0f4212 Binary files /dev/null and b/src/dashboard/src/media/images/bullet_arrow_up.png differ diff --git a/src/dashboard/src/media/images/bullet_delete.png b/src/dashboard/src/media/images/bullet_delete.png new file mode 100644 index 0000000000..bd6271b246 Binary files /dev/null and b/src/dashboard/src/media/images/bullet_delete.png differ diff --git a/src/dashboard/src/media/images/bullet_green.png b/src/dashboard/src/media/images/bullet_green.png new file mode 100644 index 0000000000..058ad261f5 Binary files /dev/null and b/src/dashboard/src/media/images/bullet_green.png differ diff --git a/src/dashboard/src/media/images/bullet_orange.png b/src/dashboard/src/media/images/bullet_orange.png new file mode 100644 index 0000000000..fa63024e55 Binary files /dev/null and b/src/dashboard/src/media/images/bullet_orange.png differ diff --git a/src/dashboard/src/media/images/cancel.png b/src/dashboard/src/media/images/cancel.png new file mode 100644 index 0000000000..c149c2bc01 Binary files /dev/null and b/src/dashboard/src/media/images/cancel.png differ diff --git a/src/dashboard/src/media/images/clock.png b/src/dashboard/src/media/images/clock.png new file mode 100644 index 0000000000..e2672c2067 Binary files /dev/null and b/src/dashboard/src/media/images/clock.png differ diff --git a/src/dashboard/src/media/images/cog.png b/src/dashboard/src/media/images/cog.png new file mode 100644 index 0000000000..67de2c6ccb Binary files /dev/null and b/src/dashboard/src/media/images/cog.png differ diff --git a/src/dashboard/src/media/images/control_play_blue.png b/src/dashboard/src/media/images/control_play_blue.png new file mode 100644 index 0000000000..f8c8ec683e Binary files /dev/null and b/src/dashboard/src/media/images/control_play_blue.png differ diff --git 
a/src/dashboard/src/media/images/control_rewind.png b/src/dashboard/src/media/images/control_rewind.png new file mode 100644 index 0000000000..c029447717 Binary files /dev/null and b/src/dashboard/src/media/images/control_rewind.png differ diff --git a/src/dashboard/src/media/images/control_stop_blue.png b/src/dashboard/src/media/images/control_stop_blue.png new file mode 100644 index 0000000000..e6f75d232d Binary files /dev/null and b/src/dashboard/src/media/images/control_stop_blue.png differ diff --git a/src/dashboard/src/media/images/delete.png b/src/dashboard/src/media/images/delete.png new file mode 100644 index 0000000000..08f249365a Binary files /dev/null and b/src/dashboard/src/media/images/delete.png differ diff --git a/src/dashboard/src/media/images/dots.gif b/src/dashboard/src/media/images/dots.gif new file mode 100644 index 0000000000..7af5b6f3cf Binary files /dev/null and b/src/dashboard/src/media/images/dots.gif differ diff --git a/src/dashboard/src/media/images/error.png b/src/dashboard/src/media/images/error.png new file mode 100644 index 0000000000..628cf2dae3 Binary files /dev/null and b/src/dashboard/src/media/images/error.png differ diff --git a/src/dashboard/src/media/images/favicon.ico b/src/dashboard/src/media/images/favicon.ico new file mode 100644 index 0000000000..a709e96826 Binary files /dev/null and b/src/dashboard/src/media/images/favicon.ico differ diff --git a/src/dashboard/src/media/images/filefolder.png b/src/dashboard/src/media/images/filefolder.png new file mode 100644 index 0000000000..48649dcaa6 Binary files /dev/null and b/src/dashboard/src/media/images/filefolder.png differ diff --git a/src/dashboard/src/media/images/filefolderopen.png b/src/dashboard/src/media/images/filefolderopen.png new file mode 100644 index 0000000000..e377832c52 Binary files /dev/null and b/src/dashboard/src/media/images/filefolderopen.png differ diff --git a/src/dashboard/src/media/images/folder.png b/src/dashboard/src/media/images/folder.png new file 
mode 100644 index 0000000000..784e8fa482 Binary files /dev/null and b/src/dashboard/src/media/images/folder.png differ diff --git a/src/dashboard/src/media/images/folder_magnify.png b/src/dashboard/src/media/images/folder_magnify.png new file mode 100644 index 0000000000..0a3e7985c1 Binary files /dev/null and b/src/dashboard/src/media/images/folder_magnify.png differ diff --git a/src/dashboard/src/media/images/logo_header.png b/src/dashboard/src/media/images/logo_header.png new file mode 100644 index 0000000000..8cadb807cb Binary files /dev/null and b/src/dashboard/src/media/images/logo_header.png differ diff --git a/src/dashboard/src/media/images/logo_login.gif b/src/dashboard/src/media/images/logo_login.gif new file mode 100644 index 0000000000..111df18b3e Binary files /dev/null and b/src/dashboard/src/media/images/logo_login.gif differ diff --git a/src/dashboard/src/media/images/logo_welcome.gif b/src/dashboard/src/media/images/logo_welcome.gif new file mode 100644 index 0000000000..1742bfa6c8 Binary files /dev/null and b/src/dashboard/src/media/images/logo_welcome.gif differ diff --git a/src/dashboard/src/media/images/package_go.png b/src/dashboard/src/media/images/package_go.png new file mode 100644 index 0000000000..aace63ad6f Binary files /dev/null and b/src/dashboard/src/media/images/package_go.png differ diff --git a/src/dashboard/src/media/images/page_white.png b/src/dashboard/src/media/images/page_white.png new file mode 100644 index 0000000000..8b8b1ca000 Binary files /dev/null and b/src/dashboard/src/media/images/page_white.png differ diff --git a/src/dashboard/src/media/images/pencil_add.png b/src/dashboard/src/media/images/pencil_add.png new file mode 100644 index 0000000000..902bbe61b3 Binary files /dev/null and b/src/dashboard/src/media/images/pencil_add.png differ diff --git a/src/dashboard/src/media/images/table_edit.png b/src/dashboard/src/media/images/table_edit.png new file mode 100644 index 0000000000..bfcb0249af Binary files /dev/null and 
b/src/dashboard/src/media/images/table_edit.png differ diff --git a/src/dashboard/src/media/images/tick.png b/src/dashboard/src/media/images/tick.png new file mode 100644 index 0000000000..a9925a06ab Binary files /dev/null and b/src/dashboard/src/media/images/tick.png differ diff --git a/src/dashboard/src/media/images/user_comment.png b/src/dashboard/src/media/images/user_comment.png new file mode 100644 index 0000000000..e54ebebafb Binary files /dev/null and b/src/dashboard/src/media/images/user_comment.png differ diff --git a/src/dashboard/src/media/images/vcard_edit.png b/src/dashboard/src/media/images/vcard_edit.png new file mode 100644 index 0000000000..ab0f6e73d6 Binary files /dev/null and b/src/dashboard/src/media/images/vcard_edit.png differ diff --git a/src/dashboard/src/media/images/zoom.png b/src/dashboard/src/media/images/zoom.png new file mode 100644 index 0000000000..908612e394 Binary files /dev/null and b/src/dashboard/src/media/images/zoom.png differ diff --git a/src/dashboard/src/media/images/zoom_out.png b/src/dashboard/src/media/images/zoom_out.png new file mode 100644 index 0000000000..07bf98a79c Binary files /dev/null and b/src/dashboard/src/media/images/zoom_out.png differ diff --git a/src/dashboard/src/media/js/access.js b/src/dashboard/src/media/js/access.js new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/dashboard/src/media/js/administration/source_directory_picker.js b/src/dashboard/src/media/js/administration/source_directory_picker.js new file mode 100644 index 0000000000..89c7df5177 --- /dev/null +++ b/src/dashboard/src/media/js/administration/source_directory_picker.js @@ -0,0 +1,18 @@ +$(document).ready(function() { + var ajaxChildDataUrl = '/filesystem/children/' + , picker = new DirectoryPickerView({ + el: $('#explorer'), + levelTemplate: $('#template-dir-level').html(), + entryTemplate: $('#template-dir-entry').html(), + ajaxChildDataUrl: ajaxChildDataUrl + }); + + picker.structure = { + 'name': 'home', + 
'parent': '', + 'children': [] + }; + + picker.render(); + picker.updateSources(); +}); diff --git a/src/dashboard/src/media/js/directory_picker.js b/src/dashboard/src/media/js/directory_picker.js new file mode 100644 index 0000000000..cb25fd0261 --- /dev/null +++ b/src/dashboard/src/media/js/directory_picker.js @@ -0,0 +1,138 @@ +var DirectoryPickerView = fileBrowser.FileExplorer.extend({ + + initialize: function() { + this.structure = {}; + this.options.closeDirsByDefault = true; + + // hide all files + this.options.entryDisplayFilter = function(entry) { + if (entry.children == undefined) { + return false; + } + return true; + }; + + this.ajaxChildDataUrl = this.options.ajaxChildDataUrl; + + this.render(); + + var self = this; + this.options.actionHandlers = [ + { + name: 'Select', + description: 'Select directory', + iconHtml: 'Add', + logic: function(result) { + self.addSource(self, result.path); + } + } + ]; + }, + + addSource: function(fileExplorer, path) { + var self = this; + $.post( + '/administration/sources/json/', + {path: path}, + function(response) { + self.updateSources(); + } + ); + }, + + deleteSource: function(id) { + var self = this; + this.confirm( + 'Delete source directory', + 'Are you sure you want to delete this?', + function() { + $.post( + '/administration/sources/delete/json/' + id + '/', + {}, + function(response) { + archivematicaNotifications.add({ + message: response.message + }); + self.updateSources(); + } + ); + } + ); + }, + + updateSources: function(cb) { + var self = this; + $.get('/administration/sources/json/' + '?' 
+ new Date().getTime(), function(results) { + tableTemplate = _.template($('#template-source-directory-table').html()); + rowTemplate = _.template($('#template-source-directory-table-row').html()); + + $('#directories').empty(); + $('#directories').off('click'); + + if (results['directories'].length) { + var rowHtml = ''; + + for(var index in results['directories']) { + rowHtml += rowTemplate({ + id: results.directories[index].id, + path: results.directories[index].path + }); + } + + $('#directories').append(tableTemplate({rows: rowHtml})); + + $('#directories').on('click', 'a', function() { + var directoryId = $(this).attr('id').replace('directory_', ''); + self.deleteSource(directoryId); + }); + } + + if (cb != undefined) { + cb(); + } + }); + }, + + alert: function(title, message) { + $('
    ' + message + '
    ') + .dialog({ + title: title, + width: 200, + height: 200, + modal: true, + buttons: [ + { + text: 'OK', + click: function() { + $(this).dialog('close'); + } + } + ] + }); + }, + + confirm: function(title, message, logic) { + $('
    ' + message + '
    ') + .dialog({ + title: title, + width: 200, + height: 200, + modal: true, + buttons: [ + { + text: 'Yes', + click: function() { + $(this).dialog('close'); + logic(); + } + }, + { + text: 'Cancel', + click: function() { + $(this).dialog('close'); + } + } + ] + }); + } +}); diff --git a/src/dashboard/src/media/js/file-explorer.js b/src/dashboard/src/media/js/file-explorer.js new file mode 100644 index 0000000000..8035e070ea --- /dev/null +++ b/src/dashboard/src/media/js/file-explorer.js @@ -0,0 +1,518 @@ +(function(exports) { + + exports.Data = { + idPaths: {}, + startX: {}, + startY: {} + }; + + exports.File = Backbone.Model.extend({ + + // generate id without slashes and replacing periods + id: function() { + return this.path().replace(/\//g, '_').replace('.', '__'); + }, + + path: function() { + return this.get('parent') + '/' + this.get('name'); + }, + + type: function() { + return (this.children == undefined) ? 'file' : 'directory'; + } + }); + + exports.Directory = exports.File.extend({ + initialize: function() { + this.children = []; + this.cssClass = 'backbone-file-explorer-directory'; + }, + + addChild: function(options, Type) { + var child = new Type(options) + , parent = this.get('parent'); + + parent = (parent != undefined) + ? parent + '/' + this.get('name') + : this.get('name'); + + child.set({parent: parent}); + + this.children.push(child); + return child; + }, + + addFile: function(options) { + return this.addChild(options, exports.File); + }, + + addDir: function(options) { + return this.addChild(options, exports.Directory); + } + }); + + exports.EntryView = Backbone.View.extend({ + + initialize: function() { + this.model = this.options.entry; + this.explorer = this.options.explorer; + this.className = (this.model.children != undefined) + ? 
'backbone-file-explorer-directory' + : 'directory-file'; + this.template = this.options.template; + this.entryClickHandler = this.options.entryClickHandler; + this.nameClickHandler = this.options.nameClickHandler; + this.actionHandlers = this.options.actionHandlers; + }, + + context: function() { + var context = this.model.toJSON(); + context.className = this.className; + return context; + }, + + cssId: function() { + return this.explorer.id + '_' + this.model.id(); + }, + + render: function() { + var context = this.context() + , html = this.template(context); + + this.el = $(html); + $(this.el).addClass(this.className); + + // set CSS ID for entries (used to capture whether directory is + // open/closed by user between data refreshes, etc.) + var id = (this.explorer) ? this.explorer.id + '_' : ''; + $(this.el).attr('id', id + this.model.id()); + + // add entry click handler if specified + if (this.entryClickHandler) { + var self = this; + $(this.el).click({self: this}, this.entryClickHandler); + } + + // add name click handler if specified + if (this.nameClickHandler) { + var self = this; + $(this.el).children('.backbone-file-explorer-directory_entry_name').click(function() { + self.nameClickHandler({ + self: self, + path: self.model.path(), + type: self.model.type() + }); + }); + } + + // add action handlers + if (this.actionHandlers) { + for(var index in this.actionHandlers) { + var handler = this.actionHandlers[index]; + var actionEl = $("" + handler.iconHtml + "") + , self = this; + // use closure to isolate handler logic + (function(handler) { + actionEl.click(function() { + handler.logic({ + self: self, + path: self.model.path(), + type: self.model.type() + }); + }); + })(handler); + $(this.el).children('.backbone-file-explorer-directory_entry_actions').append(actionEl); + } + } + + if (this.model.children == undefined) { + // remove directory button class for file entries + 
$(this.el).children('.backbone-file-explorer-directory_icon_button').removeClass('backbone-file-explorer-directory_icon_button'); + } else { + // add click handler to directory icon + var self = this; + $(this.el).children('.backbone-file-explorer-directory_icon_button').click(function() { + self.explorer.toggleDirectory($(self.el)); + }); + } + + return this; + } + }); + + exports.DirectoryView = Backbone.View.extend({ + + tagName: 'div', + + initialize: function() { + this.model = this.options.directory; + this.explorer = this.options.explorer; + this.ajaxChildDataUrl = this.options.ajaxChildDataUrl; + this.levelTemplate = _.template(this.options.levelTemplate); + this.entryTemplate = _.template(this.options.entryTemplate); + this.closeDirsByDefault = this.options.closeDirsByDefault; + this.entryDisplayFilter = this.options.entryDisplayFilter; + this.entryClickHandler = this.options.entryClickHandler; + this.nameClickHandler = this.options.nameClickHandler; + this.actionHandlers = this.options.actionHandlers; + }, + + renderChildren: function (self, entry, levelEl, level) { + // if entry is a directory, render children to directory level + if (entry.children != undefined) { + + for (var index in entry.children) { + var child = entry.children[index] + , allowDisplay = true; + + if (self.entryDisplayFilter) { + allowDisplay = self.entryDisplayFilter(child); + } + + // if display is allowed, do + if (allowDisplay) { + // take note of file paths that correspond to CSS IDs + // so they can be referenced by any external logic + var id = (this.explorer) ? 
this.explorer.id + '_' : ''; + id = id + child.id(); + exports.Data.idPaths[id] = child.path(); + + // render entry + var entryView = new exports.EntryView({ + explorer: self.explorer, + entry: child, + template: self.entryTemplate, + entryClickHandler: self.entryClickHandler, + nameClickHandler: self.nameClickHandler, + actionHandlers: self.actionHandlers + }); + + var entryEl = entryView.render().el + , isOpenDir = false; + + if (this.explorer.openDirs && this.explorer.openDirs.indexOf(entryView.cssId()) != -1) { + isOpenDir = true; + } + + // open directory, if applicable + if ((child.children != undefined && !self.closeDirsByDefault) || isOpenDir) { + $(entryEl).addClass('backbone-file-explorer-directory_open'); + } + + // add entry to current directory livel + $(levelEl).append(entryEl); + + // render child directories + self.renderDirectoryLevel(levelEl, child, level + 1, isOpenDir); + + // work around issue with certain edge-case + if ( + self.closeDirsByDefault + && child.children != undefined + && (child.children.length == 0 || !allowDisplay) + ) { + $(entryEl).next().hide(); + } + } + } + } + }, + + renderDirectoryLevel: function(destEl, entry, level, isOpen) { + var level = level || 1 + , levelEl = $(this.levelTemplate()); + + if (isOpen == undefined) { + isOpen = false; + } + + $(destEl).append(levelEl); + + // if not the top-level directory and everything's closed by default, then + // hide this directory level + if (level > 1 && this.closeDirsByDefault && !isOpen) { + $(destEl).hide(); + } + + // if directories are closed by default, be lazy and only load child + // entries when user hovers over entry, indicating they might open it + if (this.closeDirsByDefault) { + var self = this + , rendered = false; + + var uiUpdateLogic = function() { + if (!rendered) { + if (self.ajaxChildDataUrl) { + $.ajax({ + url: self.ajaxChildDataUrl, + data: { + path: entry.path() + }, + success: function(results) { + //console.log(results); + for(var index in 
results.entries) { + var entryName = results.entries[index]; + if (results.directories.indexOf(entryName) == -1) { + entry.addFile({name: entryName}); + } else { + entry.addDir({name: entryName}); + } + } + + // this code repeats below and should be refactored + self.renderChildren(self, entry, levelEl, level); + + // update zebra striping + $('.backbone-file-explorer-entry').removeClass( + 'backbone-file-explorer-entry-odd' + ); + $('.backbone-file-explorer-entry:visible:odd').addClass( + 'backbone-file-explorer-entry-odd' + ); + + // re-bind drag/drop + if (self.explorer.moveHandler) { + self.explorer.initDragAndDrop(); + } + + } + }); + } else { + self.renderChildren(self, entry, levelEl, level); + + // update zebra striping + $('.backbone-file-explorer-entry').removeClass( + 'backbone-file-explorer-entry-odd' + ); + $('.backbone-file-explorer-entry:visible:odd').addClass( + 'backbone-file-explorer-entry-odd' + ); + + // re-bind drag/drop + if (self.explorer.moveHandler) { + self.explorer.initDragAndDrop(); + } + } + rendered = true; + } + }; + + if (isOpen) { + uiUpdateLogic(); + $(levelEl).show(); + } else { + $(destEl).hover(uiUpdateLogic); + } + } else { + this.renderChildren(this, entry, levelEl, level); + } + }, + + render: function() { + var entryView = new exports.EntryView({ + explorer: this.explorer, + entry: this.model, + template: this.entryTemplate + }); + + var entryEl = entryView.render().el; + + exports.Data.idPaths[entryView.cssId()] = entryView.model.path(); + + if (!this.closeDirsByDefault) { + $(entryEl).addClass('backbone-file-explorer-directory_open'); + } + + $(this.el) + .empty() + .append(entryEl); + + this.renderDirectoryLevel(this.el, this.model); + + return this; + } + }); + + exports.FileExplorer = Backbone.View.extend({ + + tagName: 'div', + + initialize: function() { + this.ajaxChildDataUrl = this.options.ajaxChildDataUrl; + this.directory = this.options.directory; + this.structure = this.options.structure; + this.moveHandler = 
this.options.moveHandler; + this.openDirs = this.options.openDirs; + this.openDirs = this.openDirs || []; + this.id = $(this.el).attr('id'); + this.render(); + this.initDragAndDrop(); + }, + + initDragAndDrop: function() { + if (this.moveHandler) { + // bind drag-and-drop functionality + var self = this; + + // exclude top-level directory from being dragged + $(this.el) + .find('.backbone-file-explorer-entry:not(:first)') + .unbind('drag') + .bind('drag', {'self': self}, self.dragHandler); + + // allow top-level directory to be dragged into + $(this.el) + .find('.backbone-file-explorer-entry') + .unbind('drop') + .bind('drop', {'self': self}, self.dropHandler); + } + }, + + // convert JSON structure to entry objects + structureToObjects: function(structure, base) { + if (structure.children != undefined) { + base.set({name: structure.name}); + if (structure.parent != undefined) { + base.set({parent: structure.parent}); + } + for (var index in structure.children) { + var child = structure.children[index]; + if (child.children != undefined) { + var parent = base.addDir({name: child.name}); + parent = this.structureToObjects(child, parent); + } else { + base.addFile({name: child.name}); + } + } + } else { + base.addFile(structure.name); + } + + return base; + }, + + busy: function() { + $(this.el).append('Loading...'); + $(this.el).addClass('backbone-file-explorer-busy'); + $(this.el).removeClass('backbone-file-explorer-idle'); + }, + + idle: function() { + $('#backbone-file-explorer-busy-text').remove(); + $(this.el).addClass('backbone-file-explorer-idle'); + $(this.el).removeClass('backbone-file-explorer-busy'); + }, + + snapShotToggledFolders: function() { + this.toggled = []; + var self = this; + $('.backbone-file-explorer-directory').each(function(index, value) { + if (!$(value).next().is(':visible')) { + self.toggled.push($(value).attr('id')); + } + }); + }, + + restoreToggledFolders: function() { + for (var index in this.toggled) { + var cssId = 
this.toggled[index]; + this.toggleDirectory($('#' + cssId)); + } + }, + + dragHandler: function(event) { + var id = event.currentTarget.id + , $el = $('#' + event.currentTarget.id) + , offsets = $el.offset(); + + if (exports.Data.startY[id] == undefined) { + exports.Data.startX[id] = offsets.left; + exports.Data.startY[id] = offsets.top; + } + + $el.css({'z-index': 1}); + $el.css({left: event.offsetX - exports.Data.startX[id]}); + $el.css({top: event.offsetY - exports.Data.startY[id]}); + }, + + dropHandler: function(event) { + var droppedId = event.dragTarget.id; + var containerId = event.dropTarget.id; + var self = event.data.self; + + if (droppedId != containerId) { + var droppedPath = exports.Data.idPaths[droppedId]; + var containerPath = exports.Data.idPaths[containerId]; + var moveAllowed = containerPath.indexOf(droppedPath) != 0; + self.moveHandler({ + 'self': self, + 'droppedPath': droppedPath, + 'containerPath': containerPath, + 'allowed': moveAllowed + }); + } + $('#' + droppedId).css({left: 0}); + $('#' + droppedId).css({top: 0}); + }, + + toggleDirectory: function($el) { + $el.next().toggle(); + if ($el.next().is(':visible')) { + $el.addClass('backbone-file-explorer-directory_open'); + } else { + $el.removeClass('backbone-file-explorer-directory_open'); + } + }, + + getPathForCssId: function(id) { + return exports.Data.idPaths[id]; + }, + + getTypeForCssId: function(id) { + if ($('#' + id).hasClass('backbone-file-explorer-directory')) { + return 'directory'; + } else { + return 'file'; + } + }, + + render: function() { + var directory = this.directory; + + // if a JSON directory structure has been provided, render it + // into entry objects + if(this.structure) { + directory = this.structureToObjects( + this.structure, + new exports.Directory + ); + } + + var toggledFolders = this.snapShotToggledFolders(); + + this.dirView = new exports.DirectoryView({ + explorer: this, + ajaxChildDataUrl: this.ajaxChildDataUrl, + directory: directory, + openDirs: 
this.openDirs, + levelTemplate: this.options.levelTemplate, + entryTemplate: this.options.entryTemplate, + closeDirsByDefault: this.options.closeDirsByDefault, + entryDisplayFilter: this.options.entryDisplayFilter, + entryClickHandler: this.options.entryClickHandler, + nameClickHandler: this.options.nameClickHandler, + actionHandlers: this.options.actionHandlers + }); + + $(this.el) + .empty() + .append(this.dirView.render().el); + + this.restoreToggledFolders(); + + $('.backbone-file-explorer-entry:odd').addClass('backbone-file-explorer-entry-odd'); + + return this; + } + }); +})(typeof exports === 'undefined' ? this['fileBrowser'] = {} : exports); diff --git a/src/dashboard/src/media/js/file_browser.js b/src/dashboard/src/media/js/file_browser.js new file mode 100644 index 0000000000..03c96067f4 --- /dev/null +++ b/src/dashboard/src/media/js/file_browser.js @@ -0,0 +1,218 @@ +var FileExplorer = fileBrowser.FileExplorer.extend({ + + initialize: function() { + this.structure= {}; + this.options.closeDirsByDefault = true; + + this.render(); + this.initDragAndDrop(); + + var self = this; + + this.eventClickHandler = this.options.eventClickHandler; + + this.options.nameClickHandler = function(result) { + if (result.type != 'directory') { + window.open( + '/filesystem/download?filepath=' + encodeURIComponent(result.path), + '_blank' + ); + } + }; + + if (this.options.actionHandlers == undefined) { + this.options.actionHandlers = []; + } + + var self = this; + this.options.actionHandlers.push({ + name: 'Delete', + description: 'Delete file or directory', + iconHtml: "", + logic: function(result) { + self.confirm( + 'Delete', + 'Are you sure you want to delete this directory or file?', + function() { + self.deleteEntry(result.path, result.type); + } + ); + } + }); + + this.id = $(this.el).attr('id'); + }, + + deleteEntry: function(path, type) { + var self = this; + $.post( + '/filesystem/delete/', + {filepath: path}, + function(response) { + if (response.error) { + 
self.alert( + 'Delete', + response.message + ); + } + self.refresh(); + } + ); + }, + + refresh: function(path) { + $(this.el).empty(); + this.busy(); + + if (path != undefined) + { + this.path = path; + } + + var baseUrl = '/filesystem/contents/'; + var url = (this.path != undefined) + ? baseUrl + '?path=' + encodeURIComponent(this.path) + : baseUrl; + + var self = this; + + $.ajax({ + url: url, + async: false, + cache: false, + success: function(results) { + self.structure = results; + self.render(); + self.initDragAndDrop(); + self.idle(); + } + }); + }, + + moveHandler: function(move) { + if (move.allowed) { + move.self.busy(); + $('#message').text( + 'Dropped ID ' + move.droppedPath + ' onto ' + move.containerPath + ); + setTimeout(function() { + move.self.idle(); + $('#message').text(''); + }, 2000); + } else { + alert("You can't move a directory into its subdirectory."); + } + }, + + addSource: function(fileExplorer, path) { + var self = this; + $.post( + '/administration/sources/json/', + {path: path}, + function(response) { + self.alert( + 'Add source directory', + response.message + ); + self.updateSources(); + } + ); + }, + + deleteSource: function(id) { + var self = this; + this.confirm( + 'Delete source directory', + 'Are you sure you want to delete this?', + function() { + $.post( + '/administration/sources/delete/json/' + id + '/', + {}, + function(response) { + self.alert( + 'Delete source directory', + response.message + ); + self.updateSources(); + } + ); + } + ); + }, + + updateSources: function(cb) { + var self = this; + $.get('/administration/sources/json/' + '?' 
+ new Date().getTime(), function(results) { + tableTemplate = _.template($('#template-source-directory-table').html()); + rowTemplate = _.template($('#template-source-directory-table-row').html()); + + $('#directories').empty(); + $('#directories').off('click'); + + if (results['directories'].length) { + var rowHtml = ''; + + for(var index in results['directories']) { + rowHtml += rowTemplate({ + id: results.directories[index].id, + path: results.directories[index].path + }); + } + + $('#directories').append(tableTemplate({rows: rowHtml})); + + $('#directories').on('click', 'a', function() { + var directoryId = $(this).attr('id').replace('directory_', ''); + self.deleteSource(directoryId); + }); + } + + if (cb != undefined) { + cb(); + } + }); + }, + + alert: function(title, message) { + $('
    ' + message + '
    ') + .dialog({ + title: title, + width: 200, + height: 200, + modal: true, + buttons: [ + { + text: 'OK', + click: function() { + $(this).dialog('close'); + } + } + ] + }); + }, + + confirm: function(title, message, logic) { + $('
    ' + message + '
    ') + .dialog({ + title: title, + width: 200, + height: 200, + modal: true, + buttons: [ + { + text: 'Yes', + click: function() { + $(this).dialog('close'); + logic(); + } + }, + { + text: 'Cancel', + click: function() { + $(this).dialog('close'); + } + } + ] + }); + } +}); diff --git a/src/dashboard/src/media/js/ingest.js b/src/dashboard/src/media/js/ingest.js new file mode 100644 index 0000000000..1e915dc0f5 --- /dev/null +++ b/src/dashboard/src/media/js/ingest.js @@ -0,0 +1,468 @@ +/* +This file is part of Archivematica. + +Copyright 2010-2012 Artefactual Systems Inc. + +Archivematica is free software: you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation, either version 3 of the License, or +(at your option) any later version. + +Archivematica is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with Archivematica. If not, see . 
+*/ + +$(function() + { + window.Sip = Sip.extend({ + methodUrl: { + delete: '/ingest/uuid/delete/' + } + }); + + window.SipCollection = Backbone.Collection.extend({ + + model: Sip, + + url: '/ingest/status/', + + initialize: function() + { + + }, + + comparator: function(sip) + { + return -1 * sip.get('timestamp'); + } + + }); + + window.SipView = BaseSipView.extend({ + + template: _.template($('#sip-template').html()), + + initialize: function() + { + _.bindAll(this, 'render', 'update', 'updateIcon'); + this.model.view = this; + this.model.bind('change:timestamp', this.update); + }, + + openPanel: function(event) + { + event.preventDefault(); + + window.location = '/ingest/' + this.model.get('uuid') + '/'; + }, + + remove: function(event) + { + event.preventDefault(); + event.stopPropagation(); + + $(this.el).addClass('sip-removing'); + + var self = this; + + $('
    ' + + '

    Are you sure you want to remove this SIP from the dashboard? Note that this does not delete the SIP or related entities.

    ' + + '

    Directory: ' + this.model.get('directory') + '
    UUID: ' + this.model.get('uuid') + '
    Status: ' + $(this.el).find('.sip-detail-icon-status > img').attr('title') + '

    ' + + '
    ').dialog( + { + modal: true, + resizable: false, + draggable: false, + title: 'Remove SIP', + width: 480, + close: function(event, ui) + { + if (event.which !== undefined) + { + $(self.el).removeClass('sip-removing'); + } + }, + buttons: [ + { + text: 'Confirm', + click: function() { + + var $dialog = $(this); + + self.model.destroy({ + success: function (model, response) + { + $dialog.dialog('close'); + + setTimeout(function() + { + $(self.el).hide('blind', function() + { + $(this).remove(); + }); + }, 250); + }, + error: function(model, response) + { + $dialog.dialog('close'); + $(self.el).removeClass('sip-removing'); + } + + }); + } + }, + { + text: 'Cancel', + click: function() { + $(this).dialog('close'); + $(self.el).removeClass('sip-removing'); + } + }] + }); + }, + + openMetadataEditor: function(event) + { + event.stopPropagation(); + event.preventDefault(); + + var url = '/ingest/metadata/' + this.model.get('uuid') + '/'; + var self = this; + + var showDialog = function(data) + { + var dialog = $('') + .append(_.template($('#metadata-dialog').html(), data)) + .dialog({ + title: 'Dublin Core metadata editor', + width: 610, + height: 480, + modal: true, + resizable: false, + buttons: [ + { + text: 'Close', + click: function() + { + $(this).dialog('close'); + } + }, + { + text: 'Save', + click: function() + { + $.ajax({ + context: this, + type: 'POST', + dataType: 'json', + data: $(this).find('form').serialize(), + success: function() + { + $(this).dialog('close'); + }, + error: function() + { + alert("Error."); + }, + url: url}); + } + }] + }); + + if (self.model.jobs.detect(function(job) + { + return job.get('type') === 'Normalize submission documentation to preservation format'; + })) + { + dialog.find('input, select, textarea').prop('disabled', true).addClass('disabled'); + dialog.dialog('option', 'buttons', dialog.dialog('option', 'buttons').splice(0,1)); + } + }; + + $.ajax({ + type: 'GET', + dataType: 'json', + success: function(data) + { + 
showDialog(data); + }, + url: url + }); + + } + }); + + window.JobView = BaseJobView.extend({ + + className: 'job', + + events: { + 'click .btn_browse_job': 'browseJob', + 'click .btn_approve_job': 'approveJob', + 'click .btn_reject_job': 'rejectJob', + 'click .btn_show_tasks': 'showTasks', + 'click .btn_normalization_report': 'normalizationReport', + 'change select': 'action' + }, + + template: _.template($('#job-template').html()), + + render: function() + { + var jobData = this.model.toJSON(); + + if ( + jobData.type == 'Access normalization failed - copying' + || jobData.type == 'Preservation normalization failed - copying' + || jobData.type == 'thumbnail normalization failed - copying' + ) { + jobData.currentstep = 'Failed'; + } + + $(this.el).html(this.template(jobData)); + + $(this.el).css( + 'background-color', + this.getStatusColor(jobData.currentstep) + ); + + // Micro-services requiring approval + if (1 === this.model.get('status')) + { + this.$('.job-detail-actions') + .append('Browse') + .append('Approve') + .append('Reject'); + } + else + { + // ... + } + + choices = this.model.get('choices'); + + if (choices) + { + var $select = $('').append('') + , numberOfChoices = Object.keys(choices).length + , optionHtml; + + // use pop-up action selector for long choice lists + if (numberOfChoices >= 10) + { + var statusObject = {}; + this.activateEnhancedActionSelect($select, statusObject); + } + + for (var code in choices) + { + optionHtml = (choices[code] == 'Create SIP(s) manually') + ? 
'' + : ''; + $select.append(optionHtml); + } + + this.$('.job-detail-actions').append($select); + } + + this.$('.job-detail-microservice > a').tooltip(); + + this.$('.job-detail-actions > a').twipsy(); + + return this; + }, + + action: function(event) + { + var value = $(event.target).val() + , self = this; + + $.ajax({ + context: this, + data: { uuid: this.model.get('uuid'), choice: value }, + type: 'POST', + success: function(data) + { + this.model.set({ + 'currentstep': 'Executing command(s)', + 'status': 0 + }); + + this.model.sip.view.updateIcon(); + + // get rid of select, etc. + self.$('.job-detail-actions').empty(); + }, + url: '/mcp/execute/' + }); + } + + }); + + window.DirectoryBrowserView = BaseDirectoryBrowserView.extend({ + template: _.template($('#directory-browser-template').html()) + }); + + window.AppView = BaseAppView.extend({ + el: $('#sip-container') + }); + + $.fn.tooltip = tooltipPlugin; + window.onresize = optimizeWidth; + window.onload = optimizeWidth; + } +); diff --git a/src/dashboard/src/media/js/transfer/component_directory_select.js b/src/dashboard/src/media/js/transfer/component_directory_select.js new file mode 100644 index 0000000000..a8d2ac1ceb --- /dev/null +++ b/src/dashboard/src/media/js/transfer/component_directory_select.js @@ -0,0 +1,69 @@ +var DirectorySelectorView = fileBrowser.FileExplorer.extend({ + + initialize: function() { + this.structure = {}; + this.options.closeDirsByDefault = true; + this.options.entryDisplayFilter = function(entry) { + // if a file and not a ZIP file, then hide + if ( + entry.children == undefined + && entry.attributes.name.toLowerCase().indexOf('.zip') == -1 + ) { + return false; + } + return true; + }; + + this.render(); + + this.options.actionHandlers = [] + } +}); + +function createDirectoryPicker(baseDirectory, modalCssId, targetCssId) { + var url = '/filesystem/contents/?path=' + encodeURIComponent(baseDirectory) + + var selector = new DirectorySelectorView({ + el: $('#explorer'), + 
levelTemplate: $('#template-dir-level').html(), + entryTemplate: $('#template-dir-entry').html() + }); + + selector.options.actionHandlers.push({ + name: 'Select', + description: 'Select', + iconHtml: 'Add', + logic: function(result) { + var $transferPathRowEl = $('
    ') + , $transferPathEl = $('') + , $transferPathDeleteRl = $(''); + + $transferPathDeleteRl.click(function() { + $transferPathRowEl.remove(); + }); + + $transferPathEl.html('/' + result.path); + $transferPathRowEl.append($transferPathEl); + $transferPathRowEl.append($transferPathDeleteRl); + $('#' + targetCssId).append($transferPathRowEl); + $('#' + modalCssId).remove(); + + // tiger stripe transfer paths + $('.transfer_path').each(function() { + $(this).parent().css('background-color', ''); + }); + $('.transfer_path:odd').each(function() { + $(this).parent().css('background-color', '#eee'); + }); + } + }); + + selector.busy(); + + $.get(url, function(results) { + selector.structure = results; + selector.render(); + + selector.idle(); + }); +} diff --git a/src/dashboard/src/media/js/transfer/component_form.js b/src/dashboard/src/media/js/transfer/component_form.js new file mode 100644 index 0000000000..a83f40bc35 --- /dev/null +++ b/src/dashboard/src/media/js/transfer/component_form.js @@ -0,0 +1,184 @@ +var TransferComponentFormView = Backbone.View.extend({ + initialize: function(options) { + this.form_layout_template = _.template(options.form_layout_template); + this.modal_template = options.modal_template; + this.sourceDirectories = options.sourceDirectories; + }, + + showSelector: function(sourceDir) { + + // display action selector in modal window + $(this.modal_template).modal({show: true}); + + // make it destroy rather than hide modal + $('#transfer-component-select-close, #transfer-component-select-cancel') + .click(function() { + $('#transfer-component-select-modal').remove(); + }); + + // add directory selector + createDirectoryPicker( + sourceDir, + 'transfer-component-select-modal', + 'path_container' + ); + }, + + addedPaths: function() { + var paths = []; + $('.transfer_path').each(function() { + paths.push($(this).text()); + }); + return paths; + }, + + startTransfer: function(transfer) { + var path + , copied = 0; + + 
$('.transfer-component-activity-indicator').show(); + // get path to temp directory in which to copy individual transfer + // components + $.ajax({ + url: '/filesystem/get_temp_directory/', + type: 'GET', + cache: false, + success: function(results) { + + var tempDir = results.tempDir; + + // copy each transfer component to the temp directory + for (var index in transfer.sourcePaths) { + path = transfer.sourcePaths[index]; + + $.ajax({ + url: '/filesystem/copy_transfer_component/', + type: 'POST', + async: false, + cache: false, + data: { + name: transfer.name, + path: path, + destination: tempDir + }, + success: function(results) { + copied++; + } + }); + } + // move from temp directory to appropriate watchdir + var url = '/filesystem/ransfer/' + , isZipFile = path.toLowerCase().indexOf('.zip') != -1 + , filepath; + + // if transfer is a ZIP file, then extract basename add to temporary directory + if (isZipFile) { + filepath = tempDir + '/' + path.replace(/\\/g,'/').replace( /.*\//, '' ); + } else { + filepath = tempDir + '/' + transfer.name; + } + + $.ajax({ + url: url, + type: 'POST', + async: false, + cache: false, + data: { + filepath: filepath, + type: transfer.type + }, + success: function(results) { + $('#transfer-name').val(''); + $('#transfer-name-container').show(); + $('#transfer-type').val('standard'); + $('#path_container').html(''); + $('.transfer-component-activity-indicator').hide(); + } + }); + // report progress + } + }); + }, + + render: function() { + var $pathAreaEl = $('
    ') + , $pathContainerEl = $('
    '); + + this.pathContainerEl = $pathContainerEl; + + // add button to add paths via a pop-up selector + var $buttonContainer = $('
    ') + , $addButton = $('Browse') + , $sourceDirSelect = $('') + , $startTransferButton = $('Start transfer') + , self = this; + + $buttonContainer + .append($sourceDirSelect) + .append($addButton) + .append($startTransferButton); + + $pathAreaEl.append($buttonContainer); + + // add path container to parent container + $pathAreaEl.append($pathContainerEl); + + // populate select with source directory values + $.each(this.sourceDirectories, function(id, path) { + $sourceDirSelect + .append($("") + .attr("value", id) + .text(path)); + }); + + // populate view's DOM element with template output + var context = { + transfer_paths: $pathAreaEl.html() + }; + $(this.el).html(this.form_layout_template(context)); + + // make add button clickable + $('#path_add_button').click(function() { + // add modal containing directory selector + // selecting makes modal disappear, adds directory, and re-renders + self.showSelector($('#path_source_select').children(':selected').text()); + }); + + // add logic to determine whether or not transfer name needs to be + // visible if transfer type changed + $('#transfer-type').change(function() { + if ($(this).val() == 'zipped bag') { + $('#transfer-name-container').hide('slide', {direction: 'left'}, 250); + } else { + $('#transfer-name-container').show('slide', {direction: 'left'}, 250); + } + }); + + // make start transfer button clickable + $('#start_transfer_button').click(function() { + var transferName = $('#transfer-name').val(); + + // if transfering a zipped bag, give it a dummy name + if ($('#transfer-type').val() == 'zipped bag') { + transferName = 'ZippedBag'; + } + + if (!transferName) + { + alert('Please enter a transfer name'); + } else { + if (!self.addedPaths().length) { + alert('Please click "Browse" to add one or more paths from the source directory.'); + } else { + var transferData = { + 'name': transferName, + 'type': $('#transfer-type').val(), + 'accessionNumber': $('#transfer-accession-number').val(), + 'sourcePaths': 
self.addedPaths() + }; + self.startTransfer(transferData); + } + } + }); + } +}); diff --git a/src/dashboard/src/media/vendor/backbone.js b/src/dashboard/src/media/vendor/backbone.js new file mode 100644 index 0000000000..b2e49322b9 --- /dev/null +++ b/src/dashboard/src/media/vendor/backbone.js @@ -0,0 +1,1158 @@ +// Backbone.js 0.5.3 +// (c) 2010 Jeremy Ashkenas, DocumentCloud Inc. +// Backbone may be freely distributed under the MIT license. +// For all details and documentation: +// http://documentcloud.github.com/backbone + +(function(){ + + // Initial Setup + // ------------- + + // Save a reference to the global object. + var root = this; + + // Save the previous value of the `Backbone` variable. + var previousBackbone = root.Backbone; + + // The top-level namespace. All public Backbone classes and modules will + // be attached to this. Exported for both CommonJS and the browser. + var Backbone; + if (typeof exports !== 'undefined') { + Backbone = exports; + } else { + Backbone = root.Backbone = {}; + } + + // Current version of the library. Keep in sync with `package.json`. + Backbone.VERSION = '0.5.3'; + + // Require Underscore, if we're on the server, and it's not already present. + var _ = root._; + if (!_ && (typeof require !== 'undefined')) _ = require('underscore')._; + + // For Backbone's purposes, jQuery or Zepto owns the `$` variable. + var $ = root.jQuery || root.Zepto; + + // Runs Backbone.js in *noConflict* mode, returning the `Backbone` variable + // to its previous owner. Returns a reference to this Backbone object. + Backbone.noConflict = function() { + root.Backbone = previousBackbone; + return this; + }; + + // Turn on `emulateHTTP` to support legacy HTTP servers. Setting this option will + // fake `"PUT"` and `"DELETE"` requests via the `_method` parameter and set a + // `X-Http-Method-Override` header. 
+ Backbone.emulateHTTP = false; + + // Turn on `emulateJSON` to support legacy servers that can't deal with direct + // `application/json` requests ... will encode the body as + // `application/x-www-form-urlencoded` instead and will send the model in a + // form param named `model`. + Backbone.emulateJSON = false; + + // Backbone.Events + // ----------------- + + // A module that can be mixed in to *any object* in order to provide it with + // custom events. You may `bind` or `unbind` a callback function to an event; + // `trigger`-ing an event fires all callbacks in succession. + // + // var object = {}; + // _.extend(object, Backbone.Events); + // object.bind('expand', function(){ alert('expanded'); }); + // object.trigger('expand'); + // + Backbone.Events = { + + // Bind an event, specified by a string name, `ev`, to a `callback` function. + // Passing `"all"` will bind the callback to all events fired. + bind : function(ev, callback, context) { + var calls = this._callbacks || (this._callbacks = {}); + var list = calls[ev] || (calls[ev] = []); + list.push([callback, context]); + return this; + }, + + // Remove one or many callbacks. If `callback` is null, removes all + // callbacks for the event. If `ev` is null, removes all bound callbacks + // for all events. + unbind : function(ev, callback) { + var calls; + if (!ev) { + this._callbacks = {}; + } else if (calls = this._callbacks) { + if (!callback) { + calls[ev] = []; + } else { + var list = calls[ev]; + if (!list) return this; + for (var i = 0, l = list.length; i < l; i++) { + if (list[i] && callback === list[i][0]) { + list[i] = null; + break; + } + } + } + } + return this; + }, + + // Trigger an event, firing all bound callbacks. Callbacks are passed the + // same arguments as `trigger` is, apart from the event name. + // Listening for `"all"` passes the true event name as the first argument. 
+ trigger : function(eventName) { + var list, calls, ev, callback, args; + var both = 2; + if (!(calls = this._callbacks)) return this; + while (both--) { + ev = both ? eventName : 'all'; + if (list = calls[ev]) { + for (var i = 0, l = list.length; i < l; i++) { + if (!(callback = list[i])) { + list.splice(i, 1); i--; l--; + } else { + args = both ? Array.prototype.slice.call(arguments, 1) : arguments; + callback[0].apply(callback[1] || this, args); + } + } + } + } + return this; + } + + }; + + // Backbone.Model + // -------------- + + // Create a new model, with defined attributes. A client id (`cid`) + // is automatically generated and assigned for you. + Backbone.Model = function(attributes, options) { + var defaults; + attributes || (attributes = {}); + if (defaults = this.defaults) { + if (_.isFunction(defaults)) defaults = defaults.call(this); + attributes = _.extend({}, defaults, attributes); + } + this.attributes = {}; + this._escapedAttributes = {}; + this.cid = _.uniqueId('c'); + this.set(attributes, {silent : true}); + this._changed = false; + this._previousAttributes = _.clone(this.attributes); + if (options && options.collection) this.collection = options.collection; + this.initialize(attributes, options); + }; + + // Attach all inheritable methods to the Model prototype. + _.extend(Backbone.Model.prototype, Backbone.Events, { + + // A snapshot of the model's previous attributes, taken immediately + // after the last `"change"` event was fired. + _previousAttributes : null, + + // Has the item been changed since the last `"change"` event? + _changed : false, + + // The default name for the JSON `id` attribute is `"id"`. MongoDB and + // CouchDB users may want to set this to `"_id"`. + idAttribute : 'id', + + // Initialize is an empty function by default. Override it with your own + // initialization logic. + initialize : function(){}, + + // Return a copy of the model's `attributes` object. 
+ toJSON : function() { + return _.clone(this.attributes); + }, + + // Get the value of an attribute. + get : function(attr) { + return this.attributes[attr]; + }, + + // Get the HTML-escaped value of an attribute. + escape : function(attr) { + var html; + if (html = this._escapedAttributes[attr]) return html; + var val = this.attributes[attr]; + return this._escapedAttributes[attr] = escapeHTML(val == null ? '' : '' + val); + }, + + // Returns `true` if the attribute contains a value that is not null + // or undefined. + has : function(attr) { + return this.attributes[attr] != null; + }, + + // Set a hash of model attributes on the object, firing `"change"` unless you + // choose to silence it. + set : function(attrs, options) { + + // Extract attributes and options. + options || (options = {}); + if (!attrs) return this; + if (attrs.attributes) attrs = attrs.attributes; + var now = this.attributes, escaped = this._escapedAttributes; + + // Run validation. + if (!options.silent && this.validate && !this._performValidation(attrs, options)) return false; + + // Check for changes of `id`. + if (this.idAttribute in attrs) this.id = attrs[this.idAttribute]; + + // We're about to start triggering change events. + var alreadyChanging = this._changing; + this._changing = true; + + // Update attributes. + for (var attr in attrs) { + var val = attrs[attr]; + if (!_.isEqual(now[attr], val)) { + now[attr] = val; + delete escaped[attr]; + this._changed = true; + if (!options.silent) this.trigger('change:' + attr, this, val, options); + } + } + + // Fire the `"change"` event, if the model has been changed. + if (!alreadyChanging && !options.silent && this._changed) this.change(options); + this._changing = false; + return this; + }, + + // Remove an attribute from the model, firing `"change"` unless you choose + // to silence it. `unset` is a noop if the attribute doesn't exist. 
+ unset : function(attr, options) { + if (!(attr in this.attributes)) return this; + options || (options = {}); + var value = this.attributes[attr]; + + // Run validation. + var validObj = {}; + validObj[attr] = void 0; + if (!options.silent && this.validate && !this._performValidation(validObj, options)) return false; + + // Remove the attribute. + delete this.attributes[attr]; + delete this._escapedAttributes[attr]; + if (attr == this.idAttribute) delete this.id; + this._changed = true; + if (!options.silent) { + this.trigger('change:' + attr, this, void 0, options); + this.change(options); + } + return this; + }, + + // Clear all attributes on the model, firing `"change"` unless you choose + // to silence it. + clear : function(options) { + options || (options = {}); + var attr; + var old = this.attributes; + + // Run validation. + var validObj = {}; + for (attr in old) validObj[attr] = void 0; + if (!options.silent && this.validate && !this._performValidation(validObj, options)) return false; + + this.attributes = {}; + this._escapedAttributes = {}; + this._changed = true; + if (!options.silent) { + for (attr in old) { + this.trigger('change:' + attr, this, void 0, options); + } + this.change(options); + } + return this; + }, + + // Fetch the model from the server. If the server's representation of the + // model differs from its current attributes, they will be overriden, + // triggering a `"change"` event. + fetch : function(options) { + options || (options = {}); + var model = this; + var success = options.success; + options.success = function(resp, status, xhr) { + if (!model.set(model.parse(resp, xhr), options)) return false; + if (success) success(model, resp); + }; + options.error = wrapError(options.error, model, options); + return (this.sync || Backbone.sync).call(this, 'read', this, options); + }, + + // Set a hash of model attributes, and sync the model to the server. 
+ // If the server returns an attributes hash that differs, the model's + // state will be `set` again. + save : function(attrs, options) { + options || (options = {}); + if (attrs && !this.set(attrs, options)) return false; + var model = this; + var success = options.success; + options.success = function(resp, status, xhr) { + if (!model.set(model.parse(resp, xhr), options)) return false; + if (success) success(model, resp, xhr); + }; + options.error = wrapError(options.error, model, options); + var method = this.isNew() ? 'create' : 'update'; + return (this.sync || Backbone.sync).call(this, method, this, options); + }, + + // Destroy this model on the server if it was already persisted. Upon success, the model is removed + // from its collection, if it has one. + destroy : function(options) { + options || (options = {}); + if (this.isNew()) return this.trigger('destroy', this, this.collection, options); + var model = this; + var success = options.success; + options.success = function(resp) { + model.trigger('destroy', model, model.collection, options); + if (success) success(model, resp); + }; + options.error = wrapError(options.error, model, options); + return (this.sync || Backbone.sync).call(this, 'delete', this, options); + }, + + // Default URL for the model's representation on the server -- if you're + // using Backbone's restful methods, override this to change the endpoint + // that will be called. + url : function() { + var base = getUrl(this.collection) || this.urlRoot || urlError(); + if (this.isNew()) return base; + return base + (base.charAt(base.length - 1) == '/' ? '' : '/') + encodeURIComponent(this.id); + }, + + // **parse** converts a response into the hash of attributes to be `set` on + // the model. The default implementation is just to pass the response along. + parse : function(resp, xhr) { + return resp; + }, + + // Create a new model with identical attributes to this one. 
+ clone : function() { + return new this.constructor(this); + }, + + // A model is new if it has never been saved to the server, and lacks an id. + isNew : function() { + return this.id == null; + }, + + // Call this method to manually fire a `change` event for this model. + // Calling this will cause all objects observing the model to update. + change : function(options) { + this.trigger('change', this, options); + this._previousAttributes = _.clone(this.attributes); + this._changed = false; + }, + + // Determine if the model has changed since the last `"change"` event. + // If you specify an attribute name, determine if that attribute has changed. + hasChanged : function(attr) { + if (attr) return this._previousAttributes[attr] != this.attributes[attr]; + return this._changed; + }, + + // Return an object containing all the attributes that have changed, or false + // if there are no changed attributes. Useful for determining what parts of a + // view need to be updated and/or what attributes need to be persisted to + // the server. + changedAttributes : function(now) { + now || (now = this.attributes); + var old = this._previousAttributes; + var changed = false; + for (var attr in now) { + if (!_.isEqual(old[attr], now[attr])) { + changed = changed || {}; + changed[attr] = now[attr]; + } + } + return changed; + }, + + // Get the previous value of an attribute, recorded at the time the last + // `"change"` event was fired. + previous : function(attr) { + if (!attr || !this._previousAttributes) return null; + return this._previousAttributes[attr]; + }, + + // Get all of the attributes of the model at the time of the previous + // `"change"` event. + previousAttributes : function() { + return _.clone(this._previousAttributes); + }, + + // Run validation against a set of incoming attributes, returning `true` + // if all is well. If a specific `error` callback has been passed, + // call that instead of firing the general `"error"` event. 
+ _performValidation : function(attrs, options) { + var error = this.validate(attrs); + if (error) { + if (options.error) { + options.error(this, error, options); + } else { + this.trigger('error', this, error, options); + } + return false; + } + return true; + } + + }); + + // Backbone.Collection + // ------------------- + + // Provides a standard collection class for our sets of models, ordered + // or unordered. If a `comparator` is specified, the Collection will maintain + // its models in sort order, as they're added and removed. + Backbone.Collection = function(models, options) { + options || (options = {}); + if (options.comparator) this.comparator = options.comparator; + _.bindAll(this, '_onModelEvent', '_removeReference'); + this._reset(); + if (models) this.reset(models, {silent: true}); + this.initialize.apply(this, arguments); + }; + + // Define the Collection's inheritable methods. + _.extend(Backbone.Collection.prototype, Backbone.Events, { + + // The default model for a collection is just a **Backbone.Model**. + // This should be overridden in most cases. + model : Backbone.Model, + + // Initialize is an empty function by default. Override it with your own + // initialization logic. + initialize : function(){}, + + // The JSON representation of a Collection is an array of the + // models' attributes. + toJSON : function() { + return this.map(function(model){ return model.toJSON(); }); + }, + + // Add a model, or list of models to the set. Pass **silent** to avoid + // firing the `added` event for every new model. + add : function(models, options) { + if (_.isArray(models)) { + for (var i = 0, l = models.length; i < l; i++) { + this._add(models[i], options); + } + } else { + this._add(models, options); + } + return this; + }, + + // Remove a model, or a list of models from the set. Pass silent to avoid + // firing the `removed` event for every model removed. 
+ remove : function(models, options) { + if (_.isArray(models)) { + for (var i = 0, l = models.length; i < l; i++) { + this._remove(models[i], options); + } + } else { + this._remove(models, options); + } + return this; + }, + + // Get a model from the set by id. + get : function(id) { + if (id == null) return null; + return this._byId[id.id != null ? id.id : id]; + }, + + // Get a model from the set by client id. + getByCid : function(cid) { + return cid && this._byCid[cid.cid || cid]; + }, + + // Get the model at the given index. + at: function(index) { + return this.models[index]; + }, + + // Force the collection to re-sort itself. You don't need to call this under normal + // circumstances, as the set will maintain sort order as each item is added. + sort : function(options) { + options || (options = {}); + if (!this.comparator) throw new Error('Cannot sort a set without a comparator'); + this.models = this.sortBy(this.comparator); + if (!options.silent) this.trigger('reset', this, options); + return this; + }, + + // Pluck an attribute from each model in the collection. + pluck : function(attr) { + return _.map(this.models, function(model){ return model.get(attr); }); + }, + + // When you have more items than you want to add or remove individually, + // you can reset the entire set with a new list of models, without firing + // any `added` or `removed` events. Fires `reset` when finished. + reset : function(models, options) { + models || (models = []); + options || (options = {}); + this.each(this._removeReference); + this._reset(); + this.add(models, {silent: true}); + if (!options.silent) this.trigger('reset', this, options); + return this; + }, + + // Fetch the default set of models for this collection, resetting the + // collection when they arrive. If `add: true` is passed, appends the + // models to the collection instead of resetting. 
+ fetch : function(options) { + options || (options = {}); + var collection = this; + var success = options.success; + options.success = function(resp, status, xhr) { + collection[options.add ? 'add' : 'reset'](collection.parse(resp, xhr), options); + if (success) success(collection, resp); + }; + options.error = wrapError(options.error, collection, options); + return (this.sync || Backbone.sync).call(this, 'read', this, options); + }, + + // Create a new instance of a model in this collection. After the model + // has been created on the server, it will be added to the collection. + // Returns the model, or 'false' if validation on a new model fails. + create : function(model, options) { + var coll = this; + options || (options = {}); + model = this._prepareModel(model, options); + if (!model) return false; + var success = options.success; + options.success = function(nextModel, resp, xhr) { + coll.add(nextModel, options); + if (success) success(nextModel, resp, xhr); + }; + model.save(null, options); + return model; + }, + + // **parse** converts a response into a list of models to be added to the + // collection. The default implementation is just to pass it through. + parse : function(resp, xhr) { + return resp; + }, + + // Proxy to _'s chain. Can't be proxied the same way the rest of the + // underscore methods are proxied because it relies on the underscore + // constructor. + chain: function () { + return _(this.models).chain(); + }, + + // Reset all internal state. Called when the collection is reset. 
+ _reset : function(options) { + this.length = 0; + this.models = []; + this._byId = {}; + this._byCid = {}; + }, + + // Prepare a model to be added to this collection + _prepareModel: function(model, options) { + if (!(model instanceof Backbone.Model)) { + var attrs = model; + model = new this.model(attrs, {collection: this}); + if (model.validate && !model._performValidation(attrs, options)) model = false; + } else if (!model.collection) { + model.collection = this; + } + return model; + }, + + // Internal implementation of adding a single model to the set, updating + // hash indexes for `id` and `cid` lookups. + // Returns the model, or 'false' if validation on a new model fails. + _add : function(model, options) { + options || (options = {}); + model = this._prepareModel(model, options); + if (!model) return false; + var already = this.getByCid(model); + if (already) throw new Error(["Can't add the same model to a set twice", already.id]); + this._byId[model.id] = model; + this._byCid[model.cid] = model; + var index = options.at != null ? options.at : + this.comparator ? this.sortedIndex(model, this.comparator) : + this.length; + this.models.splice(index, 0, model); + model.bind('all', this._onModelEvent); + this.length++; + if (!options.silent) model.trigger('add', model, this, options); + return model; + }, + + // Internal implementation of removing a single model from the set, updating + // hash indexes for `id` and `cid` lookups. + _remove : function(model, options) { + options || (options = {}); + model = this.getByCid(model) || this.get(model); + if (!model) return null; + delete this._byId[model.id]; + delete this._byCid[model.cid]; + this.models.splice(this.indexOf(model), 1); + this.length--; + if (!options.silent) model.trigger('remove', model, this, options); + this._removeReference(model); + return model; + }, + + // Internal method to remove a model's ties to a collection. 
+ _removeReference : function(model) { + if (this == model.collection) { + delete model.collection; + } + model.unbind('all', this._onModelEvent); + }, + + // Internal method called every time a model in the set fires an event. + // Sets need to update their indexes when models change ids. All other + // events simply proxy through. "add" and "remove" events that originate + // in other collections are ignored. + _onModelEvent : function(ev, model, collection, options) { + if ((ev == 'add' || ev == 'remove') && collection != this) return; + if (ev == 'destroy') { + this._remove(model, options); + } + if (model && ev === 'change:' + model.idAttribute) { + delete this._byId[model.previous(model.idAttribute)]; + this._byId[model.id] = model; + } + this.trigger.apply(this, arguments); + } + + }); + + // Underscore methods that we want to implement on the Collection. + var methods = ['forEach', 'each', 'map', 'reduce', 'reduceRight', 'find', 'detect', + 'filter', 'select', 'reject', 'every', 'all', 'some', 'any', 'include', + 'contains', 'invoke', 'max', 'min', 'sortBy', 'sortedIndex', 'toArray', 'size', + 'first', 'rest', 'last', 'without', 'indexOf', 'lastIndexOf', 'isEmpty', 'groupBy']; + + // Mix in each Underscore method as a proxy to `Collection#models`. + _.each(methods, function(method) { + Backbone.Collection.prototype[method] = function() { + return _[method].apply(_, [this.models].concat(_.toArray(arguments))); + }; + }); + + // Backbone.Router + // ------------------- + + // Routers map faux-URLs to actions, and fire events when routes are + // matched. Creating a new one sets its `routes` hash, if not set statically. + Backbone.Router = function(options) { + options || (options = {}); + if (options.routes) this.routes = options.routes; + this._bindRoutes(); + this.initialize.apply(this, arguments); + }; + + // Cached regular expressions for matching named param parts and splatted + // parts of route strings. 
+ var namedParam = /:([\w\d]+)/g; + var splatParam = /\*([\w\d]+)/g; + var escapeRegExp = /[-[\]{}()+?.,\\^$|#\s]/g; + + // Set up all inheritable **Backbone.Router** properties and methods. + _.extend(Backbone.Router.prototype, Backbone.Events, { + + // Initialize is an empty function by default. Override it with your own + // initialization logic. + initialize : function(){}, + + // Manually bind a single named route to a callback. For example: + // + // this.route('search/:query/p:num', 'search', function(query, num) { + // ... + // }); + // + route : function(route, name, callback) { + Backbone.history || (Backbone.history = new Backbone.History); + if (!_.isRegExp(route)) route = this._routeToRegExp(route); + Backbone.history.route(route, _.bind(function(fragment) { + var args = this._extractParameters(route, fragment); + callback.apply(this, args); + this.trigger.apply(this, ['route:' + name].concat(args)); + }, this)); + }, + + // Simple proxy to `Backbone.history` to save a fragment into the history. + navigate : function(fragment, triggerRoute) { + Backbone.history.navigate(fragment, triggerRoute); + }, + + // Bind all defined routes to `Backbone.history`. We have to reverse the + // order of the routes here to support behavior where the most general + // routes can be defined at the bottom of the route map. + _bindRoutes : function() { + if (!this.routes) return; + var routes = []; + for (var route in this.routes) { + routes.unshift([route, this.routes[route]]); + } + for (var i = 0, l = routes.length; i < l; i++) { + this.route(routes[i][0], routes[i][1], this[routes[i][1]]); + } + }, + + // Convert a route string into a regular expression, suitable for matching + // against the current location hash. 
+ _routeToRegExp : function(route) { + route = route.replace(escapeRegExp, "\\$&") + .replace(namedParam, "([^\/]*)") + .replace(splatParam, "(.*?)"); + return new RegExp('^' + route + '$'); + }, + + // Given a route, and a URL fragment that it matches, return the array of + // extracted parameters. + _extractParameters : function(route, fragment) { + return route.exec(fragment).slice(1); + } + + }); + + // Backbone.History + // ---------------- + + // Handles cross-browser history management, based on URL fragments. If the + // browser does not support `onhashchange`, falls back to polling. + Backbone.History = function() { + this.handlers = []; + _.bindAll(this, 'checkUrl'); + }; + + // Cached regex for cleaning hashes. + var hashStrip = /^#*/; + + // Cached regex for detecting MSIE. + var isExplorer = /msie [\w.]+/; + + // Has the history handling already been started? + var historyStarted = false; + + // Set up all inheritable **Backbone.History** properties and methods. + _.extend(Backbone.History.prototype, { + + // The default interval to poll for hash changes, if necessary, is + // twenty times a second. + interval: 50, + + // Get the cross-browser normalized URL fragment, either from the URL, + // the hash, or the override. + getFragment : function(fragment, forcePushState) { + if (fragment == null) { + if (this._hasPushState || forcePushState) { + fragment = window.location.pathname; + var search = window.location.search; + if (search) fragment += search; + if (fragment.indexOf(this.options.root) == 0) fragment = fragment.substr(this.options.root.length); + } else { + fragment = window.location.hash; + } + } + return decodeURIComponent(fragment.replace(hashStrip, '')); + }, + + // Start the hash change handling, returning `true` if the current URL matches + // an existing route, and `false` otherwise. + start : function(options) { + + // Figure out the initial configuration. Do we need an iframe? + // Is pushState desired ... is it available? 
+ if (historyStarted) throw new Error("Backbone.history has already been started"); + this.options = _.extend({}, {root: '/'}, this.options, options); + this._wantsPushState = !!this.options.pushState; + this._hasPushState = !!(this.options.pushState && window.history && window.history.pushState); + var fragment = this.getFragment(); + var docMode = document.documentMode; + var oldIE = (isExplorer.exec(navigator.userAgent.toLowerCase()) && (!docMode || docMode <= 7)); + if (oldIE) { + this.iframe = $('' : ''); + inst._keyEvent = false; + return html; + }, + + /* Generate the month and year header. */ + _generateMonthYearHeader: function(inst, drawMonth, drawYear, minDate, maxDate, + secondary, monthNames, monthNamesShort) { + var changeMonth = this._get(inst, 'changeMonth'); + var changeYear = this._get(inst, 'changeYear'); + var showMonthAfterYear = this._get(inst, 'showMonthAfterYear'); + var html = '
    '; + var monthHtml = ''; + // month selection + if (secondary || !changeMonth) + monthHtml += '' + monthNames[drawMonth] + ''; + else { + var inMinYear = (minDate && minDate.getFullYear() == drawYear); + var inMaxYear = (maxDate && maxDate.getFullYear() == drawYear); + monthHtml += ''; + } + if (!showMonthAfterYear) + html += monthHtml + (secondary || !(changeMonth && changeYear) ? ' ' : ''); + // year selection + if ( !inst.yearshtml ) { + inst.yearshtml = ''; + if (secondary || !changeYear) + html += '' + drawYear + ''; + else { + // determine range of years to display + var years = this._get(inst, 'yearRange').split(':'); + var thisYear = new Date().getFullYear(); + var determineYear = function(value) { + var year = (value.match(/c[+-].*/) ? drawYear + parseInt(value.substring(1), 10) : + (value.match(/[+-].*/) ? thisYear + parseInt(value, 10) : + parseInt(value, 10))); + return (isNaN(year) ? thisYear : year); + }; + var year = determineYear(years[0]); + var endYear = Math.max(year, determineYear(years[1] || '')); + year = (minDate ? Math.max(year, minDate.getFullYear()) : year); + endYear = (maxDate ? Math.min(endYear, maxDate.getFullYear()) : endYear); + inst.yearshtml += ''; + + html += inst.yearshtml; + inst.yearshtml = null; + } + } + html += this._get(inst, 'yearSuffix'); + if (showMonthAfterYear) + html += (secondary || !(changeMonth && changeYear) ? ' ' : '') + monthHtml; + html += '
    '; // Close datepicker_header + return html; + }, + + /* Adjust one of the date sub-fields. */ + _adjustInstDate: function(inst, offset, period) { + var year = inst.drawYear + (period == 'Y' ? offset : 0); + var month = inst.drawMonth + (period == 'M' ? offset : 0); + var day = Math.min(inst.selectedDay, this._getDaysInMonth(year, month)) + + (period == 'D' ? offset : 0); + var date = this._restrictMinMax(inst, + this._daylightSavingAdjust(new Date(year, month, day))); + inst.selectedDay = date.getDate(); + inst.drawMonth = inst.selectedMonth = date.getMonth(); + inst.drawYear = inst.selectedYear = date.getFullYear(); + if (period == 'M' || period == 'Y') + this._notifyChange(inst); + }, + + /* Ensure a date is within any min/max bounds. */ + _restrictMinMax: function(inst, date) { + var minDate = this._getMinMaxDate(inst, 'min'); + var maxDate = this._getMinMaxDate(inst, 'max'); + var newDate = (minDate && date < minDate ? minDate : date); + newDate = (maxDate && newDate > maxDate ? maxDate : newDate); + return newDate; + }, + + /* Notify change of month/year. */ + _notifyChange: function(inst) { + var onChange = this._get(inst, 'onChangeMonthYear'); + if (onChange) + onChange.apply((inst.input ? inst.input[0] : null), + [inst.selectedYear, inst.selectedMonth + 1, inst]); + }, + + /* Determine the number of months to show. */ + _getNumberOfMonths: function(inst) { + var numMonths = this._get(inst, 'numberOfMonths'); + return (numMonths == null ? [1, 1] : (typeof numMonths == 'number' ? [1, numMonths] : numMonths)); + }, + + /* Determine the current maximum date - ensure no time components are set. */ + _getMinMaxDate: function(inst, minMax) { + return this._determineDate(inst, this._get(inst, minMax + 'Date'), null); + }, + + /* Find the number of days in a given month. */ + _getDaysInMonth: function(year, month) { + return 32 - this._daylightSavingAdjust(new Date(year, month, 32)).getDate(); + }, + + /* Find the day of the week of the first of a month. 
*/ + _getFirstDayOfMonth: function(year, month) { + return new Date(year, month, 1).getDay(); + }, + + /* Determines if we should allow a "next/prev" month display change. */ + _canAdjustMonth: function(inst, offset, curYear, curMonth) { + var numMonths = this._getNumberOfMonths(inst); + var date = this._daylightSavingAdjust(new Date(curYear, + curMonth + (offset < 0 ? offset : numMonths[0] * numMonths[1]), 1)); + if (offset < 0) + date.setDate(this._getDaysInMonth(date.getFullYear(), date.getMonth())); + return this._isInRange(inst, date); + }, + + /* Is the given date in the accepted range? */ + _isInRange: function(inst, date) { + var minDate = this._getMinMaxDate(inst, 'min'); + var maxDate = this._getMinMaxDate(inst, 'max'); + return ((!minDate || date.getTime() >= minDate.getTime()) && + (!maxDate || date.getTime() <= maxDate.getTime())); + }, + + /* Provide the configuration settings for formatting/parsing. */ + _getFormatConfig: function(inst) { + var shortYearCutoff = this._get(inst, 'shortYearCutoff'); + shortYearCutoff = (typeof shortYearCutoff != 'string' ? shortYearCutoff : + new Date().getFullYear() % 100 + parseInt(shortYearCutoff, 10)); + return {shortYearCutoff: shortYearCutoff, + dayNamesShort: this._get(inst, 'dayNamesShort'), dayNames: this._get(inst, 'dayNames'), + monthNamesShort: this._get(inst, 'monthNamesShort'), monthNames: this._get(inst, 'monthNames')}; + }, + + /* Format the given date for display. */ + _formatDate: function(inst, day, month, year) { + if (!day) { + inst.currentDay = inst.selectedDay; + inst.currentMonth = inst.selectedMonth; + inst.currentYear = inst.selectedYear; + } + var date = (day ? (typeof day == 'object' ? 
day : + this._daylightSavingAdjust(new Date(year, month, day))) : + this._daylightSavingAdjust(new Date(inst.currentYear, inst.currentMonth, inst.currentDay))); + return this.formatDate(this._get(inst, 'dateFormat'), date, this._getFormatConfig(inst)); + } +}); + +/* + * Bind hover events for datepicker elements. + * Done via delegate so the binding only occurs once in the lifetime of the parent div. + * Global instActive, set by _updateDatepicker allows the handlers to find their way back to the active picker. + */ +function bindHover(dpDiv) { + var selector = 'button, .ui-datepicker-prev, .ui-datepicker-next, .ui-datepicker-calendar td a'; + return dpDiv.bind('mouseout', function(event) { + var elem = $( event.target ).closest( selector ); + if ( !elem.length ) { + return; + } + elem.removeClass( "ui-state-hover ui-datepicker-prev-hover ui-datepicker-next-hover" ); + }) + .bind('mouseover', function(event) { + var elem = $( event.target ).closest( selector ); + if ($.datepicker._isDisabledDatepicker( instActive.inline ? dpDiv.parent()[0] : instActive.input[0]) || + !elem.length ) { + return; + } + elem.parents('.ui-datepicker-calendar').find('a').removeClass('ui-state-hover'); + elem.addClass('ui-state-hover'); + if (elem.hasClass('ui-datepicker-prev')) elem.addClass('ui-datepicker-prev-hover'); + if (elem.hasClass('ui-datepicker-next')) elem.addClass('ui-datepicker-next-hover'); + }); +} + +/* jQuery extend now ignores nulls! */ +function extendRemove(target, props) { + $.extend(target, props); + for (var name in props) + if (props[name] == null || props[name] == undefined) + target[name] = props[name]; + return target; +}; + +/* Determine whether an object is an array. */ +function isArray(a) { + return (a && (($.browser.safari && typeof a == 'object' && a.length) || + (a.constructor && a.constructor.toString().match(/\Array\(\)/)))); +}; + +/* Invoke the datepicker functionality. 
+ @param options string - a command, optionally followed by additional parameters or + Object - settings for attaching new datepicker functionality + @return jQuery object */ +$.fn.datepicker = function(options){ + + /* Verify an empty collection wasn't passed - Fixes #6976 */ + if ( !this.length ) { + return this; + } + + /* Initialise the date picker. */ + if (!$.datepicker.initialized) { + $(document).mousedown($.datepicker._checkExternalClick). + find('body').append($.datepicker.dpDiv); + $.datepicker.initialized = true; + } + + var otherArgs = Array.prototype.slice.call(arguments, 1); + if (typeof options == 'string' && (options == 'isDisabled' || options == 'getDate' || options == 'widget')) + return $.datepicker['_' + options + 'Datepicker']. + apply($.datepicker, [this[0]].concat(otherArgs)); + if (options == 'option' && arguments.length == 2 && typeof arguments[1] == 'string') + return $.datepicker['_' + options + 'Datepicker']. + apply($.datepicker, [this[0]].concat(otherArgs)); + return this.each(function() { + typeof options == 'string' ? + $.datepicker['_' + options + 'Datepicker']. + apply($.datepicker, [this].concat(otherArgs)) : + $.datepicker._attachDatepicker(this, options); + }); +}; + +$.datepicker = new Datepicker(); // singleton instance +$.datepicker.initialized = false; +$.datepicker.uuid = new Date().getTime(); +$.datepicker.version = "1.8.16"; + +// Workaround for #4055 +// Add another global to avoid noConflict issues with inline event handlers +window['DP_jQuery_' + dpuuid] = $; + +})(jQuery); +/* + * jQuery UI Progressbar 1.8.16 + * + * Copyright 2011, AUTHORS.txt (http://jqueryui.com/about) + * Dual licensed under the MIT or GPL Version 2 licenses. 
+ * http://jquery.org/license + * + * http://docs.jquery.com/UI/Progressbar + * + * Depends: + * jquery.ui.core.js + * jquery.ui.widget.js + */ +(function( $, undefined ) { + +$.widget( "ui.progressbar", { + options: { + value: 0, + max: 100 + }, + + min: 0, + + _create: function() { + this.element + .addClass( "ui-progressbar ui-widget ui-widget-content ui-corner-all" ) + .attr({ + role: "progressbar", + "aria-valuemin": this.min, + "aria-valuemax": this.options.max, + "aria-valuenow": this._value() + }); + + this.valueDiv = $( "
    " ) + .appendTo( this.element ); + + this.oldValue = this._value(); + this._refreshValue(); + }, + + destroy: function() { + this.element + .removeClass( "ui-progressbar ui-widget ui-widget-content ui-corner-all" ) + .removeAttr( "role" ) + .removeAttr( "aria-valuemin" ) + .removeAttr( "aria-valuemax" ) + .removeAttr( "aria-valuenow" ); + + this.valueDiv.remove(); + + $.Widget.prototype.destroy.apply( this, arguments ); + }, + + value: function( newValue ) { + if ( newValue === undefined ) { + return this._value(); + } + + this._setOption( "value", newValue ); + return this; + }, + + _setOption: function( key, value ) { + if ( key === "value" ) { + this.options.value = value; + this._refreshValue(); + if ( this._value() === this.options.max ) { + this._trigger( "complete" ); + } + } + + $.Widget.prototype._setOption.apply( this, arguments ); + }, + + _value: function() { + var val = this.options.value; + // normalize invalid value + if ( typeof val !== "number" ) { + val = 0; + } + return Math.min( this.options.max, Math.max( this.min, val ) ); + }, + + _percentage: function() { + return 100 * this._value() / this.options.max; + }, + + _refreshValue: function() { + var value = this.value(); + var percentage = this._percentage(); + + if ( this.oldValue !== value ) { + this.oldValue = value; + this._trigger( "change" ); + } + + this.valueDiv + .toggle( value > this.min ) + .toggleClass( "ui-corner-right", value === this.options.max ) + .width( percentage.toFixed(0) + "%" ); + this.element.attr( "aria-valuenow", value ); + } +}); + +$.extend( $.ui.progressbar, { + version: "1.8.16" +}); + +})( jQuery ); +/* + * jQuery UI Effects 1.8.16 + * + * Copyright 2011, AUTHORS.txt (http://jqueryui.com/about) + * Dual licensed under the MIT or GPL Version 2 licenses. 
+ * http://jquery.org/license + * + * http://docs.jquery.com/UI/Effects/ + */ +;jQuery.effects || (function($, undefined) { + +$.effects = {}; + + + +/******************************************************************************/ +/****************************** COLOR ANIMATIONS ******************************/ +/******************************************************************************/ + +// override the animation for color styles +$.each(['backgroundColor', 'borderBottomColor', 'borderLeftColor', + 'borderRightColor', 'borderTopColor', 'borderColor', 'color', 'outlineColor'], +function(i, attr) { + $.fx.step[attr] = function(fx) { + if (!fx.colorInit) { + fx.start = getColor(fx.elem, attr); + fx.end = getRGB(fx.end); + fx.colorInit = true; + } + + fx.elem.style[attr] = 'rgb(' + + Math.max(Math.min(parseInt((fx.pos * (fx.end[0] - fx.start[0])) + fx.start[0], 10), 255), 0) + ',' + + Math.max(Math.min(parseInt((fx.pos * (fx.end[1] - fx.start[1])) + fx.start[1], 10), 255), 0) + ',' + + Math.max(Math.min(parseInt((fx.pos * (fx.end[2] - fx.start[2])) + fx.start[2], 10), 255), 0) + ')'; + }; +}); + +// Color Conversion functions from highlightFade +// By Blair Mitchelmore +// http://jquery.offput.ca/highlightFade/ + +// Parse strings looking for color tuples [255,255,255] +function getRGB(color) { + var result; + + // Check if we're already dealing with an array of colors + if ( color && color.constructor == Array && color.length == 3 ) + return color; + + // Look for rgb(num,num,num) + if (result = /rgb\(\s*([0-9]{1,3})\s*,\s*([0-9]{1,3})\s*,\s*([0-9]{1,3})\s*\)/.exec(color)) + return [parseInt(result[1],10), parseInt(result[2],10), parseInt(result[3],10)]; + + // Look for rgb(num%,num%,num%) + if (result = /rgb\(\s*([0-9]+(?:\.[0-9]+)?)\%\s*,\s*([0-9]+(?:\.[0-9]+)?)\%\s*,\s*([0-9]+(?:\.[0-9]+)?)\%\s*\)/.exec(color)) + return [parseFloat(result[1])*2.55, parseFloat(result[2])*2.55, parseFloat(result[3])*2.55]; + + // Look for #a0b1c2 + if (result = 
/#([a-fA-F0-9]{2})([a-fA-F0-9]{2})([a-fA-F0-9]{2})/.exec(color)) + return [parseInt(result[1],16), parseInt(result[2],16), parseInt(result[3],16)]; + + // Look for #fff + if (result = /#([a-fA-F0-9])([a-fA-F0-9])([a-fA-F0-9])/.exec(color)) + return [parseInt(result[1]+result[1],16), parseInt(result[2]+result[2],16), parseInt(result[3]+result[3],16)]; + + // Look for rgba(0, 0, 0, 0) == transparent in Safari 3 + if (result = /rgba\(0, 0, 0, 0\)/.exec(color)) + return colors['transparent']; + + // Otherwise, we're most likely dealing with a named color + return colors[$.trim(color).toLowerCase()]; +} + +function getColor(elem, attr) { + var color; + + do { + color = $.curCSS(elem, attr); + + // Keep going until we find an element that has color, or we hit the body + if ( color != '' && color != 'transparent' || $.nodeName(elem, "body") ) + break; + + attr = "backgroundColor"; + } while ( elem = elem.parentNode ); + + return getRGB(color); +}; + +// Some named colors to work with +// From Interface by Stefan Petre +// http://interface.eyecon.ro/ + +var colors = { + aqua:[0,255,255], + azure:[240,255,255], + beige:[245,245,220], + black:[0,0,0], + blue:[0,0,255], + brown:[165,42,42], + cyan:[0,255,255], + darkblue:[0,0,139], + darkcyan:[0,139,139], + darkgrey:[169,169,169], + darkgreen:[0,100,0], + darkkhaki:[189,183,107], + darkmagenta:[139,0,139], + darkolivegreen:[85,107,47], + darkorange:[255,140,0], + darkorchid:[153,50,204], + darkred:[139,0,0], + darksalmon:[233,150,122], + darkviolet:[148,0,211], + fuchsia:[255,0,255], + gold:[255,215,0], + green:[0,128,0], + indigo:[75,0,130], + khaki:[240,230,140], + lightblue:[173,216,230], + lightcyan:[224,255,255], + lightgreen:[144,238,144], + lightgrey:[211,211,211], + lightpink:[255,182,193], + lightyellow:[255,255,224], + lime:[0,255,0], + magenta:[255,0,255], + maroon:[128,0,0], + navy:[0,0,128], + olive:[128,128,0], + orange:[255,165,0], + pink:[255,192,203], + purple:[128,0,128], + violet:[128,0,128], + 
red:[255,0,0], + silver:[192,192,192], + white:[255,255,255], + yellow:[255,255,0], + transparent: [255,255,255] +}; + + + +/******************************************************************************/ +/****************************** CLASS ANIMATIONS ******************************/ +/******************************************************************************/ + +var classAnimationActions = ['add', 'remove', 'toggle'], + shorthandStyles = { + border: 1, + borderBottom: 1, + borderColor: 1, + borderLeft: 1, + borderRight: 1, + borderTop: 1, + borderWidth: 1, + margin: 1, + padding: 1 + }; + +function getElementStyles() { + var style = document.defaultView + ? document.defaultView.getComputedStyle(this, null) + : this.currentStyle, + newStyle = {}, + key, + camelCase; + + // webkit enumerates style porperties + if (style && style.length && style[0] && style[style[0]]) { + var len = style.length; + while (len--) { + key = style[len]; + if (typeof style[key] == 'string') { + camelCase = key.replace(/\-(\w)/g, function(all, letter){ + return letter.toUpperCase(); + }); + newStyle[camelCase] = style[key]; + } + } + } else { + for (key in style) { + if (typeof style[key] === 'string') { + newStyle[key] = style[key]; + } + } + } + + return newStyle; +} + +function filterStyles(styles) { + var name, value; + for (name in styles) { + value = styles[name]; + if ( + // ignore null and undefined values + value == null || + // ignore functions (when does this occur?) 
+ $.isFunction(value) || + // shorthand styles that need to be expanded + name in shorthandStyles || + // ignore scrollbars (break in IE) + (/scrollbar/).test(name) || + + // only colors or values that can be converted to numbers + (!(/color/i).test(name) && isNaN(parseFloat(value))) + ) { + delete styles[name]; + } + } + + return styles; +} + +function styleDifference(oldStyle, newStyle) { + var diff = { _: 0 }, // http://dev.jquery.com/ticket/5459 + name; + + for (name in newStyle) { + if (oldStyle[name] != newStyle[name]) { + diff[name] = newStyle[name]; + } + } + + return diff; +} + +$.effects.animateClass = function(value, duration, easing, callback) { + if ($.isFunction(easing)) { + callback = easing; + easing = null; + } + + return this.queue(function() { + var that = $(this), + originalStyleAttr = that.attr('style') || ' ', + originalStyle = filterStyles(getElementStyles.call(this)), + newStyle, + className = that.attr('class'); + + $.each(classAnimationActions, function(i, action) { + if (value[action]) { + that[action + 'Class'](value[action]); + } + }); + newStyle = filterStyles(getElementStyles.call(this)); + that.attr('class', className); + + that.animate(styleDifference(originalStyle, newStyle), { + queue: false, + duration: duration, + easing: easing, + complete: function() { + $.each(classAnimationActions, function(i, action) { + if (value[action]) { that[action + 'Class'](value[action]); } + }); + // work around bug in IE by clearing the cssText before setting it + if (typeof that.attr('style') == 'object') { + that.attr('style').cssText = ''; + that.attr('style').cssText = originalStyleAttr; + } else { + that.attr('style', originalStyleAttr); + } + if (callback) { callback.apply(this, arguments); } + $.dequeue( this ); + } + }); + }); +}; + +$.fn.extend({ + _addClass: $.fn.addClass, + addClass: function(classNames, speed, easing, callback) { + return speed ? 
$.effects.animateClass.apply(this, [{ add: classNames },speed,easing,callback]) : this._addClass(classNames); + }, + + _removeClass: $.fn.removeClass, + removeClass: function(classNames,speed,easing,callback) { + return speed ? $.effects.animateClass.apply(this, [{ remove: classNames },speed,easing,callback]) : this._removeClass(classNames); + }, + + _toggleClass: $.fn.toggleClass, + toggleClass: function(classNames, force, speed, easing, callback) { + if ( typeof force == "boolean" || force === undefined ) { + if ( !speed ) { + // without speed parameter; + return this._toggleClass(classNames, force); + } else { + return $.effects.animateClass.apply(this, [(force?{add:classNames}:{remove:classNames}),speed,easing,callback]); + } + } else { + // without switch parameter; + return $.effects.animateClass.apply(this, [{ toggle: classNames },force,speed,easing]); + } + }, + + switchClass: function(remove,add,speed,easing,callback) { + return $.effects.animateClass.apply(this, [{ add: add, remove: remove },speed,easing,callback]); + } +}); + + + +/******************************************************************************/ +/*********************************** EFFECTS **********************************/ +/******************************************************************************/ + +$.extend($.effects, { + version: "1.8.16", + + // Saves a set of properties in a data storage + save: function(element, set) { + for(var i=0; i < set.length; i++) { + if(set[i] !== null) element.data("ec.storage."+set[i], element[0].style[set[i]]); + } + }, + + // Restores a set of previously saved properties from a data storage + restore: function(element, set) { + for(var i=0; i < set.length; i++) { + if(set[i] !== null) element.css(set[i], element.data("ec.storage."+set[i])); + } + }, + + setMode: function(el, mode) { + if (mode == 'toggle') mode = el.is(':hidden') ? 
'show' : 'hide'; // Set for toggle + return mode; + }, + + getBaseline: function(origin, original) { // Translates a [top,left] array into a baseline value + // this should be a little more flexible in the future to handle a string & hash + var y, x; + switch (origin[0]) { + case 'top': y = 0; break; + case 'middle': y = 0.5; break; + case 'bottom': y = 1; break; + default: y = origin[0] / original.height; + }; + switch (origin[1]) { + case 'left': x = 0; break; + case 'center': x = 0.5; break; + case 'right': x = 1; break; + default: x = origin[1] / original.width; + }; + return {x: x, y: y}; + }, + + // Wraps the element around a wrapper that copies position properties + createWrapper: function(element) { + + // if the element is already wrapped, return it + if (element.parent().is('.ui-effects-wrapper')) { + return element.parent(); + } + + // wrap the element + var props = { + width: element.outerWidth(true), + height: element.outerHeight(true), + 'float': element.css('float') + }, + wrapper = $('
    ') + .addClass('ui-effects-wrapper') + .css({ + fontSize: '100%', + background: 'transparent', + border: 'none', + margin: 0, + padding: 0 + }), + active = document.activeElement; + + element.wrap(wrapper); + + // Fixes #7595 - Elements lose focus when wrapped. + if ( element[ 0 ] === active || $.contains( element[ 0 ], active ) ) { + $( active ).focus(); + } + + wrapper = element.parent(); //Hotfix for jQuery 1.4 since some change in wrap() seems to actually loose the reference to the wrapped element + + // transfer positioning properties to the wrapper + if (element.css('position') == 'static') { + wrapper.css({ position: 'relative' }); + element.css({ position: 'relative' }); + } else { + $.extend(props, { + position: element.css('position'), + zIndex: element.css('z-index') + }); + $.each(['top', 'left', 'bottom', 'right'], function(i, pos) { + props[pos] = element.css(pos); + if (isNaN(parseInt(props[pos], 10))) { + props[pos] = 'auto'; + } + }); + element.css({position: 'relative', top: 0, left: 0, right: 'auto', bottom: 'auto' }); + } + + return wrapper.css(props).show(); + }, + + removeWrapper: function(element) { + var parent, + active = document.activeElement; + + if (element.parent().is('.ui-effects-wrapper')) { + parent = element.parent().replaceWith(element); + // Fixes #7595 - Elements lose focus when wrapped. 
+ if ( element[ 0 ] === active || $.contains( element[ 0 ], active ) ) { + $( active ).focus(); + } + return parent; + } + + return element; + }, + + setTransition: function(element, list, factor, value) { + value = value || {}; + $.each(list, function(i, x){ + unit = element.cssUnit(x); + if (unit[0] > 0) value[x] = unit[0] * factor + unit[1]; + }); + return value; + } +}); + + +function _normalizeArguments(effect, options, speed, callback) { + // shift params for method overloading + if (typeof effect == 'object') { + callback = options; + speed = null; + options = effect; + effect = options.effect; + } + if ($.isFunction(options)) { + callback = options; + speed = null; + options = {}; + } + if (typeof options == 'number' || $.fx.speeds[options]) { + callback = speed; + speed = options; + options = {}; + } + if ($.isFunction(speed)) { + callback = speed; + speed = null; + } + + options = options || {}; + + speed = speed || options.duration; + speed = $.fx.off ? 0 : typeof speed == 'number' + ? speed : speed in $.fx.speeds ? 
$.fx.speeds[speed] : $.fx.speeds._default; + + callback = callback || options.complete; + + return [effect, options, speed, callback]; +} + +function standardSpeed( speed ) { + // valid standard speeds + if ( !speed || typeof speed === "number" || $.fx.speeds[ speed ] ) { + return true; + } + + // invalid strings - treat as "normal" speed + if ( typeof speed === "string" && !$.effects[ speed ] ) { + return true; + } + + return false; +} + +$.fn.extend({ + effect: function(effect, options, speed, callback) { + var args = _normalizeArguments.apply(this, arguments), + // TODO: make effects take actual parameters instead of a hash + args2 = { + options: args[1], + duration: args[2], + callback: args[3] + }, + mode = args2.options.mode, + effectMethod = $.effects[effect]; + + if ( $.fx.off || !effectMethod ) { + // delegate to the original method (e.g., .show()) if possible + if ( mode ) { + return this[ mode ]( args2.duration, args2.callback ); + } else { + return this.each(function() { + if ( args2.callback ) { + args2.callback.call( this ); + } + }); + } + } + + return effectMethod.call(this, args2); + }, + + _show: $.fn.show, + show: function(speed) { + if ( standardSpeed( speed ) ) { + return this._show.apply(this, arguments); + } else { + var args = _normalizeArguments.apply(this, arguments); + args[1].mode = 'show'; + return this.effect.apply(this, args); + } + }, + + _hide: $.fn.hide, + hide: function(speed) { + if ( standardSpeed( speed ) ) { + return this._hide.apply(this, arguments); + } else { + var args = _normalizeArguments.apply(this, arguments); + args[1].mode = 'hide'; + return this.effect.apply(this, args); + } + }, + + // jQuery core overloads toggle and creates _toggle + __toggle: $.fn.toggle, + toggle: function(speed) { + if ( standardSpeed( speed ) || typeof speed === "boolean" || $.isFunction( speed ) ) { + return this.__toggle.apply(this, arguments); + } else { + var args = _normalizeArguments.apply(this, arguments); + args[1].mode = 'toggle'; + 
return this.effect.apply(this, args); + } + }, + + // helper functions + cssUnit: function(key) { + var style = this.css(key), val = []; + $.each( ['em','px','%','pt'], function(i, unit){ + if(style.indexOf(unit) > 0) + val = [parseFloat(style), unit]; + }); + return val; + } +}); + + + +/******************************************************************************/ +/*********************************** EASING ***********************************/ +/******************************************************************************/ + +/* + * jQuery Easing v1.3 - http://gsgd.co.uk/sandbox/jquery/easing/ + * + * Uses the built in easing capabilities added In jQuery 1.1 + * to offer multiple easing options + * + * TERMS OF USE - jQuery Easing + * + * Open source under the BSD License. + * + * Copyright 2008 George McGinley Smith + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted provided that the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list of + * conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, this list + * of conditions and the following disclaimer in the documentation and/or other materials + * provided with the distribution. + * + * Neither the name of the author nor the names of contributors may be used to endorse + * or promote products derived from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY + * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE + * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, + * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE + * GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED + * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED + * OF THE POSSIBILITY OF SUCH DAMAGE. + * +*/ + +// t: current time, b: begInnIng value, c: change In value, d: duration +$.easing.jswing = $.easing.swing; + +$.extend($.easing, +{ + def: 'easeOutQuad', + swing: function (x, t, b, c, d) { + //alert($.easing.default); + return $.easing[$.easing.def](x, t, b, c, d); + }, + easeInQuad: function (x, t, b, c, d) { + return c*(t/=d)*t + b; + }, + easeOutQuad: function (x, t, b, c, d) { + return -c *(t/=d)*(t-2) + b; + }, + easeInOutQuad: function (x, t, b, c, d) { + if ((t/=d/2) < 1) return c/2*t*t + b; + return -c/2 * ((--t)*(t-2) - 1) + b; + }, + easeInCubic: function (x, t, b, c, d) { + return c*(t/=d)*t*t + b; + }, + easeOutCubic: function (x, t, b, c, d) { + return c*((t=t/d-1)*t*t + 1) + b; + }, + easeInOutCubic: function (x, t, b, c, d) { + if ((t/=d/2) < 1) return c/2*t*t*t + b; + return c/2*((t-=2)*t*t + 2) + b; + }, + easeInQuart: function (x, t, b, c, d) { + return c*(t/=d)*t*t*t + b; + }, + easeOutQuart: function (x, t, b, c, d) { + return -c * ((t=t/d-1)*t*t*t - 1) + b; + }, + easeInOutQuart: function (x, t, b, c, d) { + if ((t/=d/2) < 1) return c/2*t*t*t*t + b; + return -c/2 * ((t-=2)*t*t*t - 2) + b; + }, + easeInQuint: function (x, t, b, c, d) { + return c*(t/=d)*t*t*t*t + b; + }, + easeOutQuint: function (x, t, b, c, d) { + return c*((t=t/d-1)*t*t*t*t + 1) + b; + }, + easeInOutQuint: function (x, t, b, c, d) { + if ((t/=d/2) < 1) return c/2*t*t*t*t*t + b; + return c/2*((t-=2)*t*t*t*t + 2) + b; + }, + easeInSine: 
function (x, t, b, c, d) { + return -c * Math.cos(t/d * (Math.PI/2)) + c + b; + }, + easeOutSine: function (x, t, b, c, d) { + return c * Math.sin(t/d * (Math.PI/2)) + b; + }, + easeInOutSine: function (x, t, b, c, d) { + return -c/2 * (Math.cos(Math.PI*t/d) - 1) + b; + }, + easeInExpo: function (x, t, b, c, d) { + return (t==0) ? b : c * Math.pow(2, 10 * (t/d - 1)) + b; + }, + easeOutExpo: function (x, t, b, c, d) { + return (t==d) ? b+c : c * (-Math.pow(2, -10 * t/d) + 1) + b; + }, + easeInOutExpo: function (x, t, b, c, d) { + if (t==0) return b; + if (t==d) return b+c; + if ((t/=d/2) < 1) return c/2 * Math.pow(2, 10 * (t - 1)) + b; + return c/2 * (-Math.pow(2, -10 * --t) + 2) + b; + }, + easeInCirc: function (x, t, b, c, d) { + return -c * (Math.sqrt(1 - (t/=d)*t) - 1) + b; + }, + easeOutCirc: function (x, t, b, c, d) { + return c * Math.sqrt(1 - (t=t/d-1)*t) + b; + }, + easeInOutCirc: function (x, t, b, c, d) { + if ((t/=d/2) < 1) return -c/2 * (Math.sqrt(1 - t*t) - 1) + b; + return c/2 * (Math.sqrt(1 - (t-=2)*t) + 1) + b; + }, + easeInElastic: function (x, t, b, c, d) { + var s=1.70158;var p=0;var a=c; + if (t==0) return b; if ((t/=d)==1) return b+c; if (!p) p=d*.3; + if (a < Math.abs(c)) { a=c; var s=p/4; } + else var s = p/(2*Math.PI) * Math.asin (c/a); + return -(a*Math.pow(2,10*(t-=1)) * Math.sin( (t*d-s)*(2*Math.PI)/p )) + b; + }, + easeOutElastic: function (x, t, b, c, d) { + var s=1.70158;var p=0;var a=c; + if (t==0) return b; if ((t/=d)==1) return b+c; if (!p) p=d*.3; + if (a < Math.abs(c)) { a=c; var s=p/4; } + else var s = p/(2*Math.PI) * Math.asin (c/a); + return a*Math.pow(2,-10*t) * Math.sin( (t*d-s)*(2*Math.PI)/p ) + c + b; + }, + easeInOutElastic: function (x, t, b, c, d) { + var s=1.70158;var p=0;var a=c; + if (t==0) return b; if ((t/=d/2)==2) return b+c; if (!p) p=d*(.3*1.5); + if (a < Math.abs(c)) { a=c; var s=p/4; } + else var s = p/(2*Math.PI) * Math.asin (c/a); + if (t < 1) return -.5*(a*Math.pow(2,10*(t-=1)) * Math.sin( 
(t*d-s)*(2*Math.PI)/p )) + b; + return a*Math.pow(2,-10*(t-=1)) * Math.sin( (t*d-s)*(2*Math.PI)/p )*.5 + c + b; + }, + easeInBack: function (x, t, b, c, d, s) { + if (s == undefined) s = 1.70158; + return c*(t/=d)*t*((s+1)*t - s) + b; + }, + easeOutBack: function (x, t, b, c, d, s) { + if (s == undefined) s = 1.70158; + return c*((t=t/d-1)*t*((s+1)*t + s) + 1) + b; + }, + easeInOutBack: function (x, t, b, c, d, s) { + if (s == undefined) s = 1.70158; + if ((t/=d/2) < 1) return c/2*(t*t*(((s*=(1.525))+1)*t - s)) + b; + return c/2*((t-=2)*t*(((s*=(1.525))+1)*t + s) + 2) + b; + }, + easeInBounce: function (x, t, b, c, d) { + return c - $.easing.easeOutBounce (x, d-t, 0, c, d) + b; + }, + easeOutBounce: function (x, t, b, c, d) { + if ((t/=d) < (1/2.75)) { + return c*(7.5625*t*t) + b; + } else if (t < (2/2.75)) { + return c*(7.5625*(t-=(1.5/2.75))*t + .75) + b; + } else if (t < (2.5/2.75)) { + return c*(7.5625*(t-=(2.25/2.75))*t + .9375) + b; + } else { + return c*(7.5625*(t-=(2.625/2.75))*t + .984375) + b; + } + }, + easeInOutBounce: function (x, t, b, c, d) { + if (t < d/2) return $.easing.easeInBounce (x, t*2, 0, c, d) * .5 + b; + return $.easing.easeOutBounce (x, t*2-d, 0, c, d) * .5 + c*.5 + b; + } +}); + +/* + * + * TERMS OF USE - EASING EQUATIONS + * + * Open source under the BSD License. + * + * Copyright 2001 Robert Penner + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted provided that the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list of + * conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, this list + * of conditions and the following disclaimer in the documentation and/or other materials + * provided with the distribution. 
+ * + * Neither the name of the author nor the names of contributors may be used to endorse + * or promote products derived from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY + * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE + * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, + * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE + * GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED + * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED + * OF THE POSSIBILITY OF SUCH DAMAGE. + * + */ + +})(jQuery); +/* + * jQuery UI Effects Blind 1.8.16 + * + * Copyright 2011, AUTHORS.txt (http://jqueryui.com/about) + * Dual licensed under the MIT or GPL Version 2 licenses. + * http://jquery.org/license + * + * http://docs.jquery.com/UI/Effects/Blind + * + * Depends: + * jquery.effects.core.js + */ +(function( $, undefined ) { + +$.effects.blind = function(o) { + + return this.queue(function() { + + // Create element + var el = $(this), props = ['position','top','bottom','left','right']; + + // Set options + var mode = $.effects.setMode(el, o.options.mode || 'hide'); // Set Mode + var direction = o.options.direction || 'vertical'; // Default direction + + // Adjust + $.effects.save(el, props); el.show(); // Save & Show + var wrapper = $.effects.createWrapper(el).css({overflow:'hidden'}); // Create Wrapper + var ref = (direction == 'vertical') ? 'height' : 'width'; + var distance = (direction == 'vertical') ? 
wrapper.height() : wrapper.width(); + if(mode == 'show') wrapper.css(ref, 0); // Shift + + // Animation + var animation = {}; + animation[ref] = mode == 'show' ? distance : 0; + + // Animate + wrapper.animate(animation, o.duration, o.options.easing, function() { + if(mode == 'hide') el.hide(); // Hide + $.effects.restore(el, props); $.effects.removeWrapper(el); // Restore + if(o.callback) o.callback.apply(el[0], arguments); // Callback + el.dequeue(); + }); + + }); + +}; + +})(jQuery); +/* + * jQuery UI Effects Bounce 1.8.16 + * + * Copyright 2011, AUTHORS.txt (http://jqueryui.com/about) + * Dual licensed under the MIT or GPL Version 2 licenses. + * http://jquery.org/license + * + * http://docs.jquery.com/UI/Effects/Bounce + * + * Depends: + * jquery.effects.core.js + */ +(function( $, undefined ) { + +$.effects.bounce = function(o) { + + return this.queue(function() { + + // Create element + var el = $(this), props = ['position','top','bottom','left','right']; + + // Set options + var mode = $.effects.setMode(el, o.options.mode || 'effect'); // Set Mode + var direction = o.options.direction || 'up'; // Default direction + var distance = o.options.distance || 20; // Default distance + var times = o.options.times || 5; // Default # of times + var speed = o.duration || 250; // Default speed per bounce + if (/show|hide/.test(mode)) props.push('opacity'); // Avoid touching opacity to prevent clearType and PNG issues in IE + + // Adjust + $.effects.save(el, props); el.show(); // Save & Show + $.effects.createWrapper(el); // Create Wrapper + var ref = (direction == 'up' || direction == 'down') ? 'top' : 'left'; + var motion = (direction == 'up' || direction == 'left') ? 'pos' : 'neg'; + var distance = o.options.distance || (ref == 'top' ? el.outerHeight({margin:true}) / 3 : el.outerWidth({margin:true}) / 3); + if (mode == 'show') el.css('opacity', 0).css(ref, motion == 'pos' ? 
-distance : distance); // Shift + if (mode == 'hide') distance = distance / (times * 2); + if (mode != 'hide') times--; + + // Animate + if (mode == 'show') { // Show Bounce + var animation = {opacity: 1}; + animation[ref] = (motion == 'pos' ? '+=' : '-=') + distance; + el.animate(animation, speed / 2, o.options.easing); + distance = distance / 2; + times--; + }; + for (var i = 0; i < times; i++) { // Bounces + var animation1 = {}, animation2 = {}; + animation1[ref] = (motion == 'pos' ? '-=' : '+=') + distance; + animation2[ref] = (motion == 'pos' ? '+=' : '-=') + distance; + el.animate(animation1, speed / 2, o.options.easing).animate(animation2, speed / 2, o.options.easing); + distance = (mode == 'hide') ? distance * 2 : distance / 2; + }; + if (mode == 'hide') { // Last Bounce + var animation = {opacity: 0}; + animation[ref] = (motion == 'pos' ? '-=' : '+=') + distance; + el.animate(animation, speed / 2, o.options.easing, function(){ + el.hide(); // Hide + $.effects.restore(el, props); $.effects.removeWrapper(el); // Restore + if(o.callback) o.callback.apply(this, arguments); // Callback + }); + } else { + var animation1 = {}, animation2 = {}; + animation1[ref] = (motion == 'pos' ? '-=' : '+=') + distance; + animation2[ref] = (motion == 'pos' ? '+=' : '-=') + distance; + el.animate(animation1, speed / 2, o.options.easing).animate(animation2, speed / 2, o.options.easing, function(){ + $.effects.restore(el, props); $.effects.removeWrapper(el); // Restore + if(o.callback) o.callback.apply(this, arguments); // Callback + }); + }; + el.queue('fx', function() { el.dequeue(); }); + el.dequeue(); + }); + +}; + +})(jQuery); +/* + * jQuery UI Effects Clip 1.8.16 + * + * Copyright 2011, AUTHORS.txt (http://jqueryui.com/about) + * Dual licensed under the MIT or GPL Version 2 licenses. 
+ * http://jquery.org/license + * + * http://docs.jquery.com/UI/Effects/Clip + * + * Depends: + * jquery.effects.core.js + */ +(function( $, undefined ) { + +$.effects.clip = function(o) { + + return this.queue(function() { + + // Create element + var el = $(this), props = ['position','top','bottom','left','right','height','width']; + + // Set options + var mode = $.effects.setMode(el, o.options.mode || 'hide'); // Set Mode + var direction = o.options.direction || 'vertical'; // Default direction + + // Adjust + $.effects.save(el, props); el.show(); // Save & Show + var wrapper = $.effects.createWrapper(el).css({overflow:'hidden'}); // Create Wrapper + var animate = el[0].tagName == 'IMG' ? wrapper : el; + var ref = { + size: (direction == 'vertical') ? 'height' : 'width', + position: (direction == 'vertical') ? 'top' : 'left' + }; + var distance = (direction == 'vertical') ? animate.height() : animate.width(); + if(mode == 'show') { animate.css(ref.size, 0); animate.css(ref.position, distance / 2); } // Shift + + // Animation + var animation = {}; + animation[ref.size] = mode == 'show' ? distance : 0; + animation[ref.position] = mode == 'show' ? 0 : distance / 2; + + // Animate + animate.animate(animation, { queue: false, duration: o.duration, easing: o.options.easing, complete: function() { + if(mode == 'hide') el.hide(); // Hide + $.effects.restore(el, props); $.effects.removeWrapper(el); // Restore + if(o.callback) o.callback.apply(el[0], arguments); // Callback + el.dequeue(); + }}); + + }); + +}; + +})(jQuery); +/* + * jQuery UI Effects Drop 1.8.16 + * + * Copyright 2011, AUTHORS.txt (http://jqueryui.com/about) + * Dual licensed under the MIT or GPL Version 2 licenses. 
+ * http://jquery.org/license + * + * http://docs.jquery.com/UI/Effects/Drop + * + * Depends: + * jquery.effects.core.js + */ +(function( $, undefined ) { + +$.effects.drop = function(o) { + + return this.queue(function() { + + // Create element + var el = $(this), props = ['position','top','bottom','left','right','opacity']; + + // Set options + var mode = $.effects.setMode(el, o.options.mode || 'hide'); // Set Mode + var direction = o.options.direction || 'left'; // Default Direction + + // Adjust + $.effects.save(el, props); el.show(); // Save & Show + $.effects.createWrapper(el); // Create Wrapper + var ref = (direction == 'up' || direction == 'down') ? 'top' : 'left'; + var motion = (direction == 'up' || direction == 'left') ? 'pos' : 'neg'; + var distance = o.options.distance || (ref == 'top' ? el.outerHeight({margin:true}) / 2 : el.outerWidth({margin:true}) / 2); + if (mode == 'show') el.css('opacity', 0).css(ref, motion == 'pos' ? -distance : distance); // Shift + + // Animation + var animation = {opacity: mode == 'show' ? 1 : 0}; + animation[ref] = (mode == 'show' ? (motion == 'pos' ? '+=' : '-=') : (motion == 'pos' ? '-=' : '+=')) + distance; + + // Animate + el.animate(animation, { queue: false, duration: o.duration, easing: o.options.easing, complete: function() { + if(mode == 'hide') el.hide(); // Hide + $.effects.restore(el, props); $.effects.removeWrapper(el); // Restore + if(o.callback) o.callback.apply(this, arguments); // Callback + el.dequeue(); + }}); + + }); + +}; + +})(jQuery); +/* + * jQuery UI Effects Explode 1.8.16 + * + * Copyright 2011, AUTHORS.txt (http://jqueryui.com/about) + * Dual licensed under the MIT or GPL Version 2 licenses. + * http://jquery.org/license + * + * http://docs.jquery.com/UI/Effects/Explode + * + * Depends: + * jquery.effects.core.js + */ +(function( $, undefined ) { + +$.effects.explode = function(o) { + + return this.queue(function() { + + var rows = o.options.pieces ? 
Math.round(Math.sqrt(o.options.pieces)) : 3; + var cells = o.options.pieces ? Math.round(Math.sqrt(o.options.pieces)) : 3; + + o.options.mode = o.options.mode == 'toggle' ? ($(this).is(':visible') ? 'hide' : 'show') : o.options.mode; + var el = $(this).show().css('visibility', 'hidden'); + var offset = el.offset(); + + //Substract the margins - not fixing the problem yet. + offset.top -= parseInt(el.css("marginTop"),10) || 0; + offset.left -= parseInt(el.css("marginLeft"),10) || 0; + + var width = el.outerWidth(true); + var height = el.outerHeight(true); + + for(var i=0;i
    ') + .css({ + position: 'absolute', + visibility: 'visible', + left: -j*(width/cells), + top: -i*(height/rows) + }) + .parent() + .addClass('ui-effects-explode') + .css({ + position: 'absolute', + overflow: 'hidden', + width: width/cells, + height: height/rows, + left: offset.left + j*(width/cells) + (o.options.mode == 'show' ? (j-Math.floor(cells/2))*(width/cells) : 0), + top: offset.top + i*(height/rows) + (o.options.mode == 'show' ? (i-Math.floor(rows/2))*(height/rows) : 0), + opacity: o.options.mode == 'show' ? 0 : 1 + }).animate({ + left: offset.left + j*(width/cells) + (o.options.mode == 'show' ? 0 : (j-Math.floor(cells/2))*(width/cells)), + top: offset.top + i*(height/rows) + (o.options.mode == 'show' ? 0 : (i-Math.floor(rows/2))*(height/rows)), + opacity: o.options.mode == 'show' ? 1 : 0 + }, o.duration || 500); + } + } + + // Set a timeout, to call the callback approx. when the other animations have finished + setTimeout(function() { + + o.options.mode == 'show' ? el.css({ visibility: 'visible' }) : el.css({ visibility: 'visible' }).hide(); + if(o.callback) o.callback.apply(el[0]); // Callback + el.dequeue(); + + $('div.ui-effects-explode').remove(); + + }, o.duration || 500); + + + }); + +}; + +})(jQuery); +/* + * jQuery UI Effects Fade 1.8.16 + * + * Copyright 2011, AUTHORS.txt (http://jqueryui.com/about) + * Dual licensed under the MIT or GPL Version 2 licenses. 
+ * http://jquery.org/license + * + * http://docs.jquery.com/UI/Effects/Fade + * + * Depends: + * jquery.effects.core.js + */ +(function( $, undefined ) { + +$.effects.fade = function(o) { + return this.queue(function() { + var elem = $(this), + mode = $.effects.setMode(elem, o.options.mode || 'hide'); + + elem.animate({ opacity: mode }, { + queue: false, + duration: o.duration, + easing: o.options.easing, + complete: function() { + (o.callback && o.callback.apply(this, arguments)); + elem.dequeue(); + } + }); + }); +}; + +})(jQuery); +/* + * jQuery UI Effects Fold 1.8.16 + * + * Copyright 2011, AUTHORS.txt (http://jqueryui.com/about) + * Dual licensed under the MIT or GPL Version 2 licenses. + * http://jquery.org/license + * + * http://docs.jquery.com/UI/Effects/Fold + * + * Depends: + * jquery.effects.core.js + */ +(function( $, undefined ) { + +$.effects.fold = function(o) { + + return this.queue(function() { + + // Create element + var el = $(this), props = ['position','top','bottom','left','right']; + + // Set options + var mode = $.effects.setMode(el, o.options.mode || 'hide'); // Set Mode + var size = o.options.size || 15; // Default fold size + var horizFirst = !(!o.options.horizFirst); // Ensure a boolean value + var duration = o.duration ? o.duration / 2 : $.fx.speeds._default / 2; + + // Adjust + $.effects.save(el, props); el.show(); // Save & Show + var wrapper = $.effects.createWrapper(el).css({overflow:'hidden'}); // Create Wrapper + var widthFirst = ((mode == 'show') != horizFirst); + var ref = widthFirst ? ['width', 'height'] : ['height', 'width']; + var distance = widthFirst ? [wrapper.width(), wrapper.height()] : [wrapper.height(), wrapper.width()]; + var percent = /([0-9]+)%/.exec(size); + if(percent) size = parseInt(percent[1],10) / 100 * distance[mode == 'hide' ? 0 : 1]; + if(mode == 'show') wrapper.css(horizFirst ? 
{height: 0, width: size} : {height: size, width: 0}); // Shift + + // Animation + var animation1 = {}, animation2 = {}; + animation1[ref[0]] = mode == 'show' ? distance[0] : size; + animation2[ref[1]] = mode == 'show' ? distance[1] : 0; + + // Animate + wrapper.animate(animation1, duration, o.options.easing) + .animate(animation2, duration, o.options.easing, function() { + if(mode == 'hide') el.hide(); // Hide + $.effects.restore(el, props); $.effects.removeWrapper(el); // Restore + if(o.callback) o.callback.apply(el[0], arguments); // Callback + el.dequeue(); + }); + + }); + +}; + +})(jQuery); +/* + * jQuery UI Effects Highlight 1.8.16 + * + * Copyright 2011, AUTHORS.txt (http://jqueryui.com/about) + * Dual licensed under the MIT or GPL Version 2 licenses. + * http://jquery.org/license + * + * http://docs.jquery.com/UI/Effects/Highlight + * + * Depends: + * jquery.effects.core.js + */ +(function( $, undefined ) { + +$.effects.highlight = function(o) { + return this.queue(function() { + var elem = $(this), + props = ['backgroundImage', 'backgroundColor', 'opacity'], + mode = $.effects.setMode(elem, o.options.mode || 'show'), + animation = { + backgroundColor: elem.css('backgroundColor') + }; + + if (mode == 'hide') { + animation.opacity = 0; + } + + $.effects.save(elem, props); + elem + .show() + .css({ + backgroundImage: 'none', + backgroundColor: o.options.color || '#ffff99' + }) + .animate(animation, { + queue: false, + duration: o.duration, + easing: o.options.easing, + complete: function() { + (mode == 'hide' && elem.hide()); + $.effects.restore(elem, props); + (mode == 'show' && !$.support.opacity && this.style.removeAttribute('filter')); + (o.callback && o.callback.apply(this, arguments)); + elem.dequeue(); + } + }); + }); +}; + +})(jQuery); +/* + * jQuery UI Effects Pulsate 1.8.16 + * + * Copyright 2011, AUTHORS.txt (http://jqueryui.com/about) + * Dual licensed under the MIT or GPL Version 2 licenses. 
+ * http://jquery.org/license + * + * http://docs.jquery.com/UI/Effects/Pulsate + * + * Depends: + * jquery.effects.core.js + */ +(function( $, undefined ) { + +$.effects.pulsate = function(o) { + return this.queue(function() { + var elem = $(this), + mode = $.effects.setMode(elem, o.options.mode || 'show'); + times = ((o.options.times || 5) * 2) - 1; + duration = o.duration ? o.duration / 2 : $.fx.speeds._default / 2, + isVisible = elem.is(':visible'), + animateTo = 0; + + if (!isVisible) { + elem.css('opacity', 0).show(); + animateTo = 1; + } + + if ((mode == 'hide' && isVisible) || (mode == 'show' && !isVisible)) { + times--; + } + + for (var i = 0; i < times; i++) { + elem.animate({ opacity: animateTo }, duration, o.options.easing); + animateTo = (animateTo + 1) % 2; + } + + elem.animate({ opacity: animateTo }, duration, o.options.easing, function() { + if (animateTo == 0) { + elem.hide(); + } + (o.callback && o.callback.apply(this, arguments)); + }); + + elem + .queue('fx', function() { elem.dequeue(); }) + .dequeue(); + }); +}; + +})(jQuery); +/* + * jQuery UI Effects Scale 1.8.16 + * + * Copyright 2011, AUTHORS.txt (http://jqueryui.com/about) + * Dual licensed under the MIT or GPL Version 2 licenses. + * http://jquery.org/license + * + * http://docs.jquery.com/UI/Effects/Scale + * + * Depends: + * jquery.effects.core.js + */ +(function( $, undefined ) { + +$.effects.puff = function(o) { + return this.queue(function() { + var elem = $(this), + mode = $.effects.setMode(elem, o.options.mode || 'hide'), + percent = parseInt(o.options.percent, 10) || 150, + factor = percent / 100, + original = { height: elem.height(), width: elem.width() }; + + $.extend(o.options, { + fade: true, + mode: mode, + percent: mode == 'hide' ? percent : 100, + from: mode == 'hide' + ? 
original + : { + height: original.height * factor, + width: original.width * factor + } + }); + + elem.effect('scale', o.options, o.duration, o.callback); + elem.dequeue(); + }); +}; + +$.effects.scale = function(o) { + + return this.queue(function() { + + // Create element + var el = $(this); + + // Set options + var options = $.extend(true, {}, o.options); + var mode = $.effects.setMode(el, o.options.mode || 'effect'); // Set Mode + var percent = parseInt(o.options.percent,10) || (parseInt(o.options.percent,10) == 0 ? 0 : (mode == 'hide' ? 0 : 100)); // Set default scaling percent + var direction = o.options.direction || 'both'; // Set default axis + var origin = o.options.origin; // The origin of the scaling + if (mode != 'effect') { // Set default origin and restore for show/hide + options.origin = origin || ['middle','center']; + options.restore = true; + } + var original = {height: el.height(), width: el.width()}; // Save original + el.from = o.options.from || (mode == 'show' ? {height: 0, width: 0} : original); // Default from state + + // Adjust + var factor = { // Set scaling factor + y: direction != 'horizontal' ? (percent / 100) : 1, + x: direction != 'vertical' ? 
(percent / 100) : 1 + }; + el.to = {height: original.height * factor.y, width: original.width * factor.x}; // Set to state + + if (o.options.fade) { // Fade option to support puff + if (mode == 'show') {el.from.opacity = 0; el.to.opacity = 1;}; + if (mode == 'hide') {el.from.opacity = 1; el.to.opacity = 0;}; + }; + + // Animation + options.from = el.from; options.to = el.to; options.mode = mode; + + // Animate + el.effect('size', options, o.duration, o.callback); + el.dequeue(); + }); + +}; + +$.effects.size = function(o) { + + return this.queue(function() { + + // Create element + var el = $(this), props = ['position','top','bottom','left','right','width','height','overflow','opacity']; + var props1 = ['position','top','bottom','left','right','overflow','opacity']; // Always restore + var props2 = ['width','height','overflow']; // Copy for children + var cProps = ['fontSize']; + var vProps = ['borderTopWidth', 'borderBottomWidth', 'paddingTop', 'paddingBottom']; + var hProps = ['borderLeftWidth', 'borderRightWidth', 'paddingLeft', 'paddingRight']; + + // Set options + var mode = $.effects.setMode(el, o.options.mode || 'effect'); // Set Mode + var restore = o.options.restore || false; // Default restore + var scale = o.options.scale || 'both'; // Default scale mode + var origin = o.options.origin; // The origin of the sizing + var original = {height: el.height(), width: el.width()}; // Save original + el.from = o.options.from || original; // Default from state + el.to = o.options.to || original; // Default to state + // Adjust + if (origin) { // Calculate baseline shifts + var baseline = $.effects.getBaseline(origin, original); + el.from.top = (original.height - el.from.height) * baseline.y; + el.from.left = (original.width - el.from.width) * baseline.x; + el.to.top = (original.height - el.to.height) * baseline.y; + el.to.left = (original.width - el.to.width) * baseline.x; + }; + var factor = { // Set scaling factor + from: {y: el.from.height / original.height, x: 
el.from.width / original.width}, + to: {y: el.to.height / original.height, x: el.to.width / original.width} + }; + if (scale == 'box' || scale == 'both') { // Scale the css box + if (factor.from.y != factor.to.y) { // Vertical props scaling + props = props.concat(vProps); + el.from = $.effects.setTransition(el, vProps, factor.from.y, el.from); + el.to = $.effects.setTransition(el, vProps, factor.to.y, el.to); + }; + if (factor.from.x != factor.to.x) { // Horizontal props scaling + props = props.concat(hProps); + el.from = $.effects.setTransition(el, hProps, factor.from.x, el.from); + el.to = $.effects.setTransition(el, hProps, factor.to.x, el.to); + }; + }; + if (scale == 'content' || scale == 'both') { // Scale the content + if (factor.from.y != factor.to.y) { // Vertical props scaling + props = props.concat(cProps); + el.from = $.effects.setTransition(el, cProps, factor.from.y, el.from); + el.to = $.effects.setTransition(el, cProps, factor.to.y, el.to); + }; + }; + $.effects.save(el, restore ? 
props : props1); el.show(); // Save & Show + $.effects.createWrapper(el); // Create Wrapper + el.css('overflow','hidden').css(el.from); // Shift + + // Animate + if (scale == 'content' || scale == 'both') { // Scale the children + vProps = vProps.concat(['marginTop','marginBottom']).concat(cProps); // Add margins/font-size + hProps = hProps.concat(['marginLeft','marginRight']); // Add margins + props2 = props.concat(vProps).concat(hProps); // Concat + el.find("*[width]").each(function(){ + child = $(this); + if (restore) $.effects.save(child, props2); + var c_original = {height: child.height(), width: child.width()}; // Save original + child.from = {height: c_original.height * factor.from.y, width: c_original.width * factor.from.x}; + child.to = {height: c_original.height * factor.to.y, width: c_original.width * factor.to.x}; + if (factor.from.y != factor.to.y) { // Vertical props scaling + child.from = $.effects.setTransition(child, vProps, factor.from.y, child.from); + child.to = $.effects.setTransition(child, vProps, factor.to.y, child.to); + }; + if (factor.from.x != factor.to.x) { // Horizontal props scaling + child.from = $.effects.setTransition(child, hProps, factor.from.x, child.from); + child.to = $.effects.setTransition(child, hProps, factor.to.x, child.to); + }; + child.css(child.from); // Shift children + child.animate(child.to, o.duration, o.options.easing, function(){ + if (restore) $.effects.restore(child, props2); // Restore children + }); // Animate children + }); + }; + + // Animate + el.animate(el.to, { queue: false, duration: o.duration, easing: o.options.easing, complete: function() { + if (el.to.opacity === 0) { + el.css('opacity', el.from.opacity); + } + if(mode == 'hide') el.hide(); // Hide + $.effects.restore(el, restore ? 
props : props1); $.effects.removeWrapper(el); // Restore + if(o.callback) o.callback.apply(this, arguments); // Callback + el.dequeue(); + }}); + + }); + +}; + +})(jQuery); +/* + * jQuery UI Effects Shake 1.8.16 + * + * Copyright 2011, AUTHORS.txt (http://jqueryui.com/about) + * Dual licensed under the MIT or GPL Version 2 licenses. + * http://jquery.org/license + * + * http://docs.jquery.com/UI/Effects/Shake + * + * Depends: + * jquery.effects.core.js + */ +(function( $, undefined ) { + +$.effects.shake = function(o) { + + return this.queue(function() { + + // Create element + var el = $(this), props = ['position','top','bottom','left','right']; + + // Set options + var mode = $.effects.setMode(el, o.options.mode || 'effect'); // Set Mode + var direction = o.options.direction || 'left'; // Default direction + var distance = o.options.distance || 20; // Default distance + var times = o.options.times || 3; // Default # of times + var speed = o.duration || o.options.duration || 140; // Default speed per shake + + // Adjust + $.effects.save(el, props); el.show(); // Save & Show + $.effects.createWrapper(el); // Create Wrapper + var ref = (direction == 'up' || direction == 'down') ? 'top' : 'left'; + var motion = (direction == 'up' || direction == 'left') ? 'pos' : 'neg'; + + // Animation + var animation = {}, animation1 = {}, animation2 = {}; + animation[ref] = (motion == 'pos' ? '-=' : '+=') + distance; + animation1[ref] = (motion == 'pos' ? '+=' : '-=') + distance * 2; + animation2[ref] = (motion == 'pos' ? '-=' : '+=') + distance * 2; + + // Animate + el.animate(animation, speed, o.options.easing); + for (var i = 1; i < times; i++) { // Shakes + el.animate(animation1, speed, o.options.easing).animate(animation2, speed, o.options.easing); + }; + el.animate(animation1, speed, o.options.easing). 
+ animate(animation, speed / 2, o.options.easing, function(){ // Last shake + $.effects.restore(el, props); $.effects.removeWrapper(el); // Restore + if(o.callback) o.callback.apply(this, arguments); // Callback + }); + el.queue('fx', function() { el.dequeue(); }); + el.dequeue(); + }); + +}; + +})(jQuery); +/* + * jQuery UI Effects Slide 1.8.16 + * + * Copyright 2011, AUTHORS.txt (http://jqueryui.com/about) + * Dual licensed under the MIT or GPL Version 2 licenses. + * http://jquery.org/license + * + * http://docs.jquery.com/UI/Effects/Slide + * + * Depends: + * jquery.effects.core.js + */ +(function( $, undefined ) { + +$.effects.slide = function(o) { + + return this.queue(function() { + + // Create element + var el = $(this), props = ['position','top','bottom','left','right']; + + // Set options + var mode = $.effects.setMode(el, o.options.mode || 'show'); // Set Mode + var direction = o.options.direction || 'left'; // Default Direction + + // Adjust + $.effects.save(el, props); el.show(); // Save & Show + $.effects.createWrapper(el).css({overflow:'hidden'}); // Create Wrapper + var ref = (direction == 'up' || direction == 'down') ? 'top' : 'left'; + var motion = (direction == 'up' || direction == 'left') ? 'pos' : 'neg'; + var distance = o.options.distance || (ref == 'top' ? el.outerHeight({margin:true}) : el.outerWidth({margin:true})); + if (mode == 'show') el.css(ref, motion == 'pos' ? (isNaN(distance) ? "-" + distance : -distance) : distance); // Shift + + // Animation + var animation = {}; + animation[ref] = (mode == 'show' ? (motion == 'pos' ? '+=' : '-=') : (motion == 'pos' ? 
'-=' : '+=')) + distance; + + // Animate + el.animate(animation, { queue: false, duration: o.duration, easing: o.options.easing, complete: function() { + if(mode == 'hide') el.hide(); // Hide + $.effects.restore(el, props); $.effects.removeWrapper(el); // Restore + if(o.callback) o.callback.apply(this, arguments); // Callback + el.dequeue(); + }}); + + }); + +}; + +})(jQuery); +/* + * jQuery UI Effects Transfer 1.8.16 + * + * Copyright 2011, AUTHORS.txt (http://jqueryui.com/about) + * Dual licensed under the MIT or GPL Version 2 licenses. + * http://jquery.org/license + * + * http://docs.jquery.com/UI/Effects/Transfer + * + * Depends: + * jquery.effects.core.js + */ +(function( $, undefined ) { + +$.effects.transfer = function(o) { + return this.queue(function() { + var elem = $(this), + target = $(o.options.to), + endPosition = target.offset(), + animation = { + top: endPosition.top, + left: endPosition.left, + height: target.innerHeight(), + width: target.innerWidth() + }, + startPosition = elem.offset(), + transfer = $('
    ') + .appendTo(document.body) + .addClass(o.options.className) + .css({ + top: startPosition.top, + left: startPosition.left, + height: elem.innerHeight(), + width: elem.innerWidth(), + position: 'absolute' + }) + .animate(animation, o.duration, o.options.easing, function() { + transfer.remove(); + (o.callback && o.callback.apply(elem[0], arguments)); + elem.dequeue(); + }); + }); +}; + +})(jQuery); diff --git a/src/dashboard/src/media/vendor/jquery.event.drag-1.5.js b/src/dashboard/src/media/vendor/jquery.event.drag-1.5.js new file mode 100644 index 0000000000..cf24ffa573 --- /dev/null +++ b/src/dashboard/src/media/vendor/jquery.event.drag-1.5.js @@ -0,0 +1,137 @@ +/*! +jquery.event.drag.js ~ v1.5 ~ Copyright (c) 2008, Three Dub Media (http://threedubmedia.com) +Liscensed under the MIT License ~ http://threedubmedia.googlecode.com/files/MIT-LICENSE.txt +*/ +;(function($){ // secure $ jQuery alias +/*******************************************************************************************/ +// Created: 2008-06-04 | Updated: 2009-03-24 +/*******************************************************************************************/ +// Events: drag, dragstart, dragend +/*******************************************************************************************/ + +// jquery method +$.fn.drag = function( fn1, fn2, fn3 ){ + if ( fn2 ) this.bind('dragstart', fn1 ); // 2+ args + if ( fn3 ) this.bind('dragend', fn3 ); // 3 args + return !fn1 ? this.trigger('drag') // 0 args + : this.bind('drag', fn2 ? 
fn2 : fn1 ); // 1+ args + }; + +// local refs +var $event = $.event, $special = $event.special, + +// special event configuration +drag = $special.drag = { + not: ':input', // don't begin to drag on event.targets that match this selector + distance: 0, // distance dragged before dragstart + which: 1, // mouse button pressed to start drag sequence + dragging: false, // hold the active target element + setup: function( data ){ + data = $.extend({ + distance: drag.distance, + which: drag.which, + not: drag.not + }, data || {}); + data.distance = squared( data.distance ); // x² + y² = distance² + $event.add( this, "mousedown", handler, data ); + if ( this.attachEvent ) this.attachEvent("ondragstart", dontStart ); // prevent image dragging in IE... + }, + teardown: function(){ + $event.remove( this, "mousedown", handler ); + if ( this === drag.dragging ) drag.dragging = drag.proxy = false; // deactivate element + selectable( this, true ); // enable text selection + if ( this.detachEvent ) this.detachEvent("ondragstart", dontStart ); // prevent image dragging in IE... + } + }; + +// prevent normal event binding... +$special.dragstart = $special.dragend = { setup:function(){}, teardown:function(){} }; + +// handle drag-releatd DOM events +function handler ( event ){ + var elem = this, returned, data = event.data || {}; + // mousemove or mouseup + if ( data.elem ){ + // update event properties... 
+ elem = event.dragTarget = data.elem; // drag source element + event.dragProxy = drag.proxy || elem; // proxy element or source + event.cursorOffsetX = data.pageX - data.left; // mousedown offset + event.cursorOffsetY = data.pageY - data.top; // mousedown offset + event.offsetX = event.pageX - event.cursorOffsetX; // element offset + event.offsetY = event.pageY - event.cursorOffsetY; // element offset + } + // mousedown, check some initial props to avoid the switch statement + else if ( drag.dragging || ( data.which>0 && event.which!=data.which ) || + $( event.target ).is( data.not ) ) return; + // handle various events + switch ( event.type ){ + // mousedown, left click, event.target is not restricted, init dragging + case 'mousedown': + $.extend( data, $( elem ).offset(), { + elem: elem, target: event.target, + pageX: event.pageX, pageY: event.pageY + }); // store some initial attributes + $event.add( document, "mousemove mouseup", handler, data ); + selectable( elem, false ); // disable text selection + drag.dragging = null; // pending state + return false; // prevents text selection in safari + // mousemove, check distance, start dragging + case !drag.dragging && 'mousemove': + if ( squared( event.pageX-data.pageX ) + + squared( event.pageY-data.pageY ) // x² + y² = distance² + < data.distance ) break; // distance tolerance not reached + event.target = data.target; // force target from "mousedown" event (fix distance issue) + returned = hijack( event, "dragstart", elem ); // trigger "dragstart", return proxy element + if ( returned !== false ){ // "dragstart" not rejected + drag.dragging = elem; // activate element + drag.proxy = event.dragProxy = $( returned || elem )[0]; // set proxy + } + // mousemove, dragging + case 'mousemove': + if ( drag.dragging ){ + returned = hijack( event, "drag", elem ); // trigger "drag" + if ( $special.drop ){ // manage drop events + $special.drop.allowed = ( returned !== false ); // prevent drop + $special.drop.handler( event 
); // "dropstart", "dropend" + } + if ( returned !== false ) break; // "drag" not rejected, stop + event.type = "mouseup"; // helps "drop" handler behave + } + // mouseup, stop dragging + case 'mouseup': + $event.remove( document, "mousemove mouseup", handler ); // remove page events + if ( drag.dragging ){ + if ( $special.drop ) $special.drop.handler( event ); // "drop" + hijack( event, "dragend", elem ); // trigger "dragend" + } + selectable( elem, true ); // enable text selection + drag.dragging = drag.proxy = data.elem = false; // deactivate element + break; + } + return true; + }; + +// set event type to custom value, and handle it +function hijack ( event, type, elem ){ + event.type = type; // force the event type + var result = $.event.handle.call( elem, event ); + return result===false ? false : result || event.result; + }; + +// return the value squared +function squared ( value ){ return Math.pow( value, 2 ); }; + +// suppress default dragstart IE events... +function dontStart(){ return ( drag.dragging === false ); }; + +// toggles text selection attributes +function selectable ( elem, bool ){ + if ( !elem ) return; // maybe element was removed ? + elem.unselectable = bool ? "off" : "on"; // IE + elem.onselectstart = function(){ return bool; }; // IE + //if ( document.selection && document.selection.empty ) document.selection.empty(); // IE + if ( elem.style ) elem.style.MozUserSelect = bool ? "" : "none"; // FF + }; + +/*******************************************************************************************/ +})( jQuery ); // confine scope \ No newline at end of file diff --git a/src/dashboard/src/media/vendor/jquery.event.drop-1.1.js b/src/dashboard/src/media/vendor/jquery.event.drop-1.1.js new file mode 100644 index 0000000000..1cdd5f54c7 --- /dev/null +++ b/src/dashboard/src/media/vendor/jquery.event.drop-1.1.js @@ -0,0 +1,157 @@ +/*! 
+jquery.event.drop.js ~ v1.1 ~ Copyright (c) 2008, Three Dub Media (http://threedubmedia.com) +Liscensed under the MIT License ~ http://threedubmedia.googlecode.com/files/MIT-LICENSE.txt +*/ +;(function($){ // secure $ jQuery alias +// Created: 2008-06-04 | Updated: 2009-01-26 +/*******************************************************************************************/ +// Events: drop, dropstart, dropend +/*******************************************************************************************/ + +// JQUERY METHOD +$.fn.drop = function( fn1, fn2, fn3 ){ + if ( fn2 ) this.bind('dropstart', fn1 ); // 2+ args + if ( fn3 ) this.bind('dropend', fn3 ); // 3 args + return !fn1 ? this.trigger('drop') // 0 args + : this.bind('drop', fn2 ? fn2 : fn1 ); // 1+ args + }; + +// DROP MANAGEMENT UTILITY +$.dropManage = function( opts ){ // return filtered drop target elements, cache their positions + opts = opts || {}; + // safely set new options... + drop.data = []; + drop.filter = opts.filter || '*'; + drop.delay = opts.delay || drop.delay; + drop.tolerance = opts.tolerance || null; + drop.mode = opts.mode || drop.mode || 'intersect'; + // return the filtered set of drop targets + return drop.$targets.filter( drop.filter ).each(function(){ + // locate and store the filtered drop targets + drop.data[ drop.data.length ] = drop.locate( this ); + }); + }; + +// local refs +var $event = $.event, $special = $event.special, + +// SPECIAL EVENT CONFIGURATION +drop = $special.drop = { + delay: 100, // default frequency to track drop targets + mode: 'intersect', // default mode to determine valid drop targets + $targets: $([]), data: [], // storage of drop targets and locations + setup: function(){ + drop.$targets = drop.$targets.add( this ); + drop.data[ drop.data.length ] = drop.locate( this ); + }, + teardown: function(){ var elem = this; + drop.$targets = drop.$targets.not( this ); + drop.data = $.grep( drop.data, function( obj ){ + return ( obj.elem !== elem ); + }); + }, + // 
shared handler + handler: function( event ){ + var dropstart = null, dropped; + event.dropTarget = drop.dropping || undefined; // dropped element + if ( drop.data.length && event.dragTarget ){ + // handle various events + switch ( event.type ){ + // drag/mousemove, from $.event.special.drag + case 'drag': // TOLERATE >> + drop.event = event; // store the mousemove event + if ( !drop.timer ) // monitor drop targets + drop.timer = setTimeout( tolerate, 20 ); + break; + // dragstop/mouseup, from $.event.special.drag + case 'mouseup': // DROP >> DROPEND >> + drop.timer = clearTimeout( drop.timer ); // delete timer + if ( !drop.dropping ) break; // stop, no drop + if ( drop.allowed ) + dropped = hijack( event, "drop", drop.dropping ); // trigger "drop" + dropstart = false; + // activate new target, from tolerate (async) + case drop.dropping && 'dropstart': // DROPSTART >> ( new target ) + dropstart = dropstart===null && drop.allowed ? true : false; + // deactivate active target, from tolerate (async) + case drop.dropping && 'dropend': // DROPEND >> + hijack( event, "dropend", drop.dropping ); // trigger "dropend" + drop.dropping = null; // empty dropper + if ( dropped === false ) event.dropTarget = undefined; + if ( !dropstart ) break; // stop + // activate target, from tolerate (async) + case drop.allowed && 'dropstart': // DROPSTART >> + event.dropTarget = this; + drop.dropping = hijack( event, "dropstart", this )!==false ? 
this : null; // trigger "dropstart" + break; + } + } + }, + // returns the location positions of an element + locate: function( elem ){ // return { L:left, R:right, T:top, B:bottom, H:height, W:width } + var $el = $(elem), pos = $el.offset(), h = $el.outerHeight(), w = $el.outerWidth(); + return { elem: elem, L: pos.left, R: pos.left+w, T: pos.top, B: pos.top+h, W: w, H: h }; + }, + // test the location positions of an element against another OR an X,Y coord + contains: function( target, test ){ // target { L,R,T,B,H,W } contains test [x,y] or { L,R,T,B,H,W } + return ( ( test[0] || test.L ) >= target.L && ( test[0] || test.R ) <= target.R + && ( test[1] || test.T ) >= target.T && ( test[1] || test.B ) <= target.B ); + }, + // stored tolerance modes + modes: { // fn scope: "$.event.special.drop" object + // target with mouse wins, else target with most overlap wins + 'intersect': function( event, proxy, target ){ + return this.contains( target, [ event.pageX, event.pageY ] ) ? // check cursor + target : this.modes['overlap'].apply( this, arguments ); // check overlap + }, + // target with most overlap wins + 'overlap': function( event, proxy, target ){ + // calculate the area of overlap... + target.overlap = Math.max( 0, Math.min( target.B, proxy.B ) - Math.max( target.T, proxy.T ) ) + * Math.max( 0, Math.min( target.R, proxy.R ) - Math.max( target.L, proxy.L ) ); + if ( target.overlap > ( ( this.best || {} ).overlap || 0 ) ) // compare overlap + this.best = target; // set as the best match so far + return null; // no winner + }, + // proxy is completely contained within target bounds + 'fit': function( event, proxy, target ){ + return this.contains( target, proxy ) ? target : null; + }, + // center of the proxy is contained within target bounds + 'middle': function( event, proxy, target ){ + return this.contains( target, [ proxy.L+proxy.W/2, proxy.T+proxy.H/2 ] ) ? 
target : null; + } + } + }; + +// set event type to custom value, and handle it +function hijack ( event, type, elem ){ + event.type = type; // force the event type + var result = $event.handle.call( elem, event ); + return result===false ? false : result || event.result; + }; + +// async, recursive tolerance execution +function tolerate (){ + var i = 0, drp, winner, // local variables + xy = [ drop.event.pageX, drop.event.pageY ], // mouse location + drg = drop.locate( drop.event.dragProxy ); // drag proxy location + drop.tolerance = drop.tolerance || drop.modes[ drop.mode ]; // custom or stored tolerance fn + do if ( drp = drop.data[i] ){ // each drop target location + // tolerance function is defined, or mouse contained + winner = drop.tolerance ? drop.tolerance.call( drop, drop.event, drg, drp ) + : drop.contains( drp, xy ) ? drp : null; // mouse is always fallback + } + while ( ++i= v0.9 + /************************* + + // bind to specific elements, allows for multiple timer instances + $(elem).idleTimer(timeout|'destroy'|'getElapsedTime'); + $.data(elem,'idleTimer'); // 'idle' or 'active' + + // if you're using the old $.idleTimer api, you should not do $(document).idleTimer(...) + + // element bound timers will only watch for events inside of them. + // you may just want page-level activity, in which case you may set up + // your timers on document, document.documentElement, and document.body + + + ********/ + +(function($){ + +$.idleTimer = function(newTimeout, elem){ + + // defaults that are to be stored as instance props on the elem + + var idle = false, //indicates if the user is idle + enabled = true, //indicates if the idle timer is enabled + timeout = 30000, //the amount of time (ms) before the user is considered idle + events = 'mousemove keydown DOMMouseScroll mousewheel mousedown'; // activity is one of these events + + + elem = elem || document; + + + + /* (intentionally not documented) + * Toggles the idle state and fires an appropriate event. 
+ * @return {void} + */ + var toggleIdleState = function(myelem){ + + // curse you, mozilla setTimeout lateness bug! + if (typeof myelem == 'number') myelem = undefined; + + var obj = $.data(myelem || elem,'idleTimerObj'); + + //toggle the state + obj.idle = !obj.idle; + + // reset timeout counter + var elapsed = (+new Date) - obj.olddate; + obj.olddate = +new Date; + + // handle Chrome always triggering idle after js alert or comfirm popup + if (obj.idle && (elapsed < timeout)) { + obj.idle = false; + clearTimeout($.idleTimer.tId); + if (enabled) + $.idleTimer.tId = setTimeout(toggleIdleState, timeout); + return; + } + + //fire appropriate event + + // create a custom event, but first, store the new state on the element + // and then append that string to a namespace + var event = jQuery.Event( $.data(elem,'idleTimer', obj.idle ? "idle" : "active" ) + '.idleTimer' ); + + // we dont want this to bubble + event.stopPropagation(); + $(elem).trigger(event); + }, + + /** + * Stops the idle timer. This removes appropriate event handlers + * and cancels any pending timeouts. + * @return {void} + * @method stop + * @static + */ + stop = function(elem){ + + var obj = $.data(elem,'idleTimerObj'); + + //set to disabled + obj.enabled = false; + + //clear any pending timeouts + clearTimeout(obj.tId); + + //detach the event handlers + $(elem).unbind('.idleTimer'); + }, + + + /* (intentionally not documented) + * Handles a user event indicating that the user isn't idle. + * @param {Event} event A DOM2-normalized event object. + * @return {void} + */ + handleUserEvent = function(){ + + var obj = $.data(this,'idleTimerObj'); + + //clear any existing timeout + clearTimeout(obj.tId); + + + + //if the idle timer is enabled + if (obj.enabled){ + + + //if it's idle, that means the user is no longer idle + if (obj.idle){ + toggleIdleState(this); + } + + //set a new timeout + obj.tId = setTimeout(toggleIdleState, obj.timeout); + + } + }; + + + /** + * Starts the idle timer. 
This adds appropriate event handlers + * and starts the first timeout. + * @param {int} newTimeout (Optional) A new value for the timeout period in ms. + * @return {void} + * @method $.idleTimer + * @static + */ + + + var obj = $.data(elem,'idleTimerObj') || new function(){}; + + obj.olddate = obj.olddate || +new Date; + + //assign a new timeout if necessary + if (typeof newTimeout == "number"){ + timeout = newTimeout; + } else if (newTimeout === 'destroy') { + stop(elem); + return this; + } else if (newTimeout === 'getElapsedTime'){ + return (+new Date) - obj.olddate; + } + + //assign appropriate event handlers + $(elem).bind($.trim((events+' ').split(' ').join('.idleTimer ')),handleUserEvent); + + + obj.idle = idle; + obj.enabled = enabled; + obj.timeout = timeout; + + + //set a timeout to toggle state + obj.tId = setTimeout(toggleIdleState, obj.timeout); + + // assume the user is active for the first x seconds. + $.data(elem,'idleTimer',"active"); + + // store our instance on the object + $.data(elem,'idleTimerObj',obj); + + + +}; // end of $.idleTimer() + + +// v0.9 API for defining multiple timers. 
+$.fn.idleTimer = function(newTimeout){ + + this[0] && $.idleTimer(newTimeout,this[0]); + + return this; +} + + +})(jQuery); \ No newline at end of file diff --git a/src/dashboard/src/media/vendor/jquery.inputmask.js b/src/dashboard/src/media/vendor/jquery.inputmask.js new file mode 100644 index 0000000000..8c8fbedf1e --- /dev/null +++ b/src/dashboard/src/media/vendor/jquery.inputmask.js @@ -0,0 +1,732 @@ +/* +Input Mask plugin for jquery +http://github.com/RobinHerbots/jquery.inputmask +Copyright (c) 2010 Robin Herbots +Licensed under the MIT license (http://www.opensource.org/licenses/mit-license.php) +Version: 0.4.5e + +This plugin is based on the masked input plugin written by Josh Bush (digitalbush.com) +*/ + +(function($) { + if ($.fn.inputmask == undefined) { + $.inputmask = { + //options default + defaults: { + placeholder: "_", + optionalmarker: { + start: "[", + end: "]" + }, + escapeChar: "\\", + mask: null, + oncomplete: null, //executes when the mask is complete + oncleared: null, //executes when the mask is cleared + repeat: 0, //repetitions of the mask + greedy: true, //true: allocated buffer for the mask and repetitions - false: allocate only if needed + patch_val: true, //override the jquery.val fn to detect changed in the inputmask by setting val(value) + autoUnmask: false, //in combination with patch_val: true => automatically unmask when retrieving the value with $.fn.val + numericInput: false, //numericInput input direction style (input shifts to the left while holding the caret position) + clearMaskOnLostFocus: true, + insertMode: true, //insert the input or overwrite the input + clearIncomplete: false, //clear the incomplete input on blur + aliases: {}, //aliases definitions => see jquery.inputmask.extentions.js + definitions: { + '9': { + "validator": "[0-9]", + "cardinality": 1, + 'prevalidator': null + }, + 'a': { + "validator": "[A-Za-z]", + "cardinality": 1, + "prevalidator": null + }, + '*': { + "validator": "[A-Za-z0-9]", + 
"cardinality": 1, + "prevalidator": null + }, + 'd': { //day + "validator": "0[1-9]|[12][0-9]|3[01]", + "cardinality": 2, + "prevalidator": [{ "validator": "[0-3]", "cardinality": 1}] + }, + 'm': { //month + "validator": "0[1-9]|1[012]", + "cardinality": 2, + "prevalidator": [{ "validator": "[01]", "cardinality": 1}] + }, + 'y': { //year + "validator": "(19|20)\\d\\d", + "cardinality": 4, + "prevalidator": [ + { "validator": "[12]", "cardinality": 1 }, + { "validator": "(19|20)", "cardinality": 2 }, + { "validator": "(19|20)\\d", "cardinality": 3 } + ] + } + }, + keyCode: { ALT: 18, BACKSPACE: 8, CAPS_LOCK: 20, COMMA: 188, COMMAND: 91, COMMAND_LEFT: 91, COMMAND_RIGHT: 93, CONTROL: 17, DELETE: 46, DOWN: 40, END: 35, ENTER: 13, ESCAPE: 27, HOME: 36, INSERT: 45, LEFT: 37, MENU: 93, NUMPAD_ADD: 107, NUMPAD_DECIMAL: 110, NUMPAD_DIVIDE: 111, NUMPAD_ENTER: 108, + NUMPAD_MULTIPLY: 106, NUMPAD_SUBTRACT: 109, PAGE_DOWN: 34, PAGE_UP: 33, PERIOD: 190, RIGHT: 39, SHIFT: 16, SPACE: 32, TAB: 9, UP: 38, WINDOWS: 91 + } + }, + val: $.fn.val //store the original jquery val function + }; + + $.fn.inputmask = function(fn, options) { + var opts = $.extend(true, {}, $.inputmask.defaults, options); + var pasteEventName = $.browser.msie ? 
'paste.inputmask' : 'input.inputmask'; + var iPhone = (window.orientation != undefined); + + var _val = $.inputmask.val; + if (opts.patch_val && $.fn.val.inputmaskpatch != true) { + $.fn.val = function() { + if (this.data('inputmask')) { + if (this.data('inputmask')['autoUnmask'] && arguments.length == 0) { + return this.inputmask('unmaskedvalue'); + } + else { + var result = _val.apply(this, arguments); + if (arguments.length > 0) { + this.triggerHandler('setvalue.inputmask'); + } + return result; + } + } + else { + return _val.apply(this, arguments); + } + }; + $.extend($.fn.val, { + inputmaskpatch: true + }); + } + + if (typeof fn == "string") { + switch (fn) { + case "mask": + //init buffer + var _buffer = getMaskTemplate(); + var tests = getTestingChain(); + + return this.each(function() { + mask(this); + }); + break; + case "unmaskedvalue": + var tests = this.data('inputmask')['tests']; + var _buffer = this.data('inputmask')['_buffer']; + opts.greedy = this.data('inputmask')['greedy']; + opts.repeat = this.data('inputmask')['repeat']; + opts.definitions = this.data('inputmask')['definitions']; + return unmaskedvalue(this); + break; + case "setvalue": + setvalue(this, options); //options in this case the value + break; + case "remove": + var tests, _buffer; + return this.each(function() { + var input = $(this); + if (input.data('inputmask')) { + tests = input.data('inputmask')['tests']; + _buffer = input.data('inputmask')['_buffer']; + opts.greedy = input.data('inputmask')['greedy']; + opts.repeat = input.data('inputmask')['repeat']; + opts.definitions = input.data('inputmask')['definitions']; + //writeout the unmaskedvalue + _val.call(input, unmaskedvalue(input, true)); + //clear data + input.removeData('inputmask'); + //unbind all events + input.unbind(".inputmask"); + input.removeClass('focus.inputmask'); + } + }); + break; + default: + //check if the fn is an alias + if (!ResolveAlias(fn)) { + //maybe fn is a mask so we try + //set mask + opts.mask = fn; + 
} + //init buffer + var _buffer = getMaskTemplate(); + var tests = getTestingChain(); + + return this.each(function() { + mask(this); + }); + + break; + } + } if (typeof fn == "object") { + opts = $.extend(true, {}, $.inputmask.defaults, fn); + + //init buffer + var _buffer = getMaskTemplate(); + var tests = getTestingChain(); + + return this.each(function() { + mask(this); + }); + } + + //helper functions + function ResolveAlias(aliasStr) { + var aliasDefinition = opts.aliases[aliasStr]; + if (aliasDefinition) + if (!aliasDefinition.alias) { + $.extend(true, opts, aliasDefinition); //merge alias definition in the options + return true; + } else return ResolveAlias(aliasDefinition.alias); //alias is another alias + return false; + } + + function getMaskTemplate() { + var escaped = false, outCount = 0; + if (opts.mask.length == 1 && opts.greedy == false) { opts.placeholder = ""; } //hide placeholder with single non-greedy mask + var singleMask = $.map(opts.mask.split(""), function(element, index) { + var outElem = []; + if (element == opts.escapeChar) { + escaped = true; + } + else if ((element != opts.optionalmarker.start && element != opts.optionalmarker.end) || escaped) { + var maskdef = opts.definitions[element]; + if (maskdef && !escaped) { + for (i = 0; i < maskdef.cardinality; i++) { + outElem.push(getPlaceHolder(outCount + i)); + } + } else { + outElem.push(element); + escaped = false; + } + outCount += outElem.length; + return outElem; + } + }); + + //allocate repetitions + var repeatedMask = singleMask.slice(); + for (var i = 1; i < opts.repeat && opts.greedy; i++) { + repeatedMask = repeatedMask.concat(singleMask.slice()); + } + return repeatedMask; + } + + //test definition => {fn: RegExp/function, cardinality: int, optionality: bool, newBlockMarker: bool, offset: int} + function getTestingChain() { + var isOptional = false, escaped = false; + var newBlockMarker = false; //indicates wheter the begin/ending of a block should be indicated + + return 
$.map(opts.mask.split(""), function(element, index) { + var outElem = []; + + if (element == opts.escapeChar) { + escaped = true; + } else if (element == opts.optionalmarker.start && !escaped) { + isOptional = true; + newBlockMarker = true; + } + else if (element == opts.optionalmarker.end && !escaped) { + isOptional = false; + newBlockMarker = true; + } + else { + var maskdef = opts.definitions[element]; + if (maskdef && !escaped) { + var prevalidators = maskdef["prevalidator"], prevalidatorsL = prevalidators ? prevalidators.length : 0; + for (i = 1; i < maskdef.cardinality; i++) { + var prevalidator = prevalidatorsL >= i ? prevalidators[i - 1] : [], validator = prevalidator["validator"], cardinality = prevalidator["cardinality"]; + outElem.push({ fn: validator ? typeof validator == 'string' ? new RegExp(validator) : new function() { this.test = validator; } : new RegExp("."), cardinality: cardinality ? cardinality : 1, optionality: isOptional, newBlockMarker: isOptional == true ? newBlockMarker : false, offset: 0 }); + if (isOptional == true) //reset newBlockMarker + newBlockMarker = false; + } + outElem.push({ fn: maskdef.validator ? typeof maskdef.validator == 'string' ? new RegExp(maskdef.validator) : new function() { this.test = maskdef.validator; } : new RegExp("."), cardinality: maskdef.cardinality, optionality: isOptional, newBlockMarker: newBlockMarker, offset: 0 }); + } else { + outElem.push({ fn: null, cardinality: 0, optionality: isOptional, newBlockMarker: newBlockMarker, offset: 0 }); + escaped = false; + } + //reset newBlockMarker + newBlockMarker = false; + return outElem; + } + }); + } + + function isValid(pos, c, buffer) { + if (pos < 0 || pos >= getMaskLength()) return false; + var testPos = determineTestPosition(pos), loopend = c ? 1 : 0, chrs = ''; + for (var i = tests[testPos].cardinality; i > loopend; i--) { + chrs += getBufferElement(buffer, testPos - (i - 1)); + } + + if (c) { chrs += c; } + return tests[testPos].fn != null ? 
tests[testPos].fn.test(chrs, buffer) : false; + } + + function isMask(pos) { + var testPos = determineTestPosition(pos); + var test = tests[testPos]; + + return test != undefined ? test.fn : false; + } + + function determineTestPosition(pos) { + return pos % tests.length; + } + + function getPlaceHolder(pos) { + return opts.placeholder.charAt(pos % opts.placeholder.length); + } + + function getMaskLength() { + var calculatedLength = _buffer.length; + if (!opts.greedy && opts.repeat > 1) { + calculatedLength += (_buffer.length * (opts.repeat - 1)) + } + return calculatedLength; + } + + //pos: from position + function seekNext(buffer, pos) { + var maskL = getMaskLength(); + if (pos >= maskL) return maskL; + var position = pos; + while (++position < maskL && !isMask(position)) { }; + return position; + } + //pos: from position + function seekPrevious(buffer, pos) { + if (pos <= 0) return 0; + var position = pos; + while (--position > 0 && !isMask(position)) { }; + return position; + } + //these are needed to handle the non-greedy mask repetitions + function setBufferElement(buffer, position, element) { + prepareBuffer(buffer, position); + buffer[position] = element; + } + function getBufferElement(buffer, position) { + prepareBuffer(buffer, position); + return buffer[position]; + } + + function prepareBuffer(buffer, position) { + while ((buffer.length <= position || position < 0) && buffer.length < getMaskLength()) { + var j = 0; + if (opts.numericInput) { + j = determineTestPosition(position); + buffer.unshift(_buffer[j]); + } else while (_buffer[j] !== undefined) { + buffer.push(_buffer[j++]); + } + } + } + + function writeBuffer(input, buffer, caretPos) { + _val.call(input, buffer.join('')); + if (caretPos != undefined) + caret(input, caretPos); + }; + function clearBuffer(buffer, start, end) { + for (var i = start, maskL = getMaskLength(); i < end && i < maskL; i++) { + setBufferElement(buffer, i, getBufferElement(_buffer.slice(), i)); + } + }; + + function 
SetReTargetPlaceHolder(buffer, pos) { + var testPos = determineTestPosition(pos); + setBufferElement(buffer, pos, getBufferElement(_buffer, testPos)); + } + + function checkVal(input, buffer, clearInvalid) { + var inputValue = TruncateInput(_val.call(input)); + clearBuffer(buffer, 0, buffer.length); + buffer.length = _buffer.length; + var lastMatch = -1, checkPosition = -1, maskL = getMaskLength(), ivl = inputValue.length; + if (opts.numericInput) { + lastMatch += maskL; + var p = seekPrevious(buffer, ivl); + for (var ivp = 0; ivp < ivl; ivp++) { + var c = inputValue.charAt(ivp); + if (isValid(p, c, buffer)) { + for (var i = 0; i < maskL; i++) { + if (isMask(i)) { + SetReTargetPlaceHolder(buffer, i); + + var j = seekNext(buffer, i); + var el = getBufferElement(buffer, j); + if (el != getPlaceHolder(j)) { + if (j < getMaskLength() && isValid(i, el, buffer) !== false) { + setBufferElement(buffer, i, getBufferElement(buffer, j)); + } else { + if (isMask(i)) + break; + } + } + } else + SetReTargetPlaceHolder(buffer, i); + } + lastMatch = seekPrevious(buffer, maskL); + setBufferElement(buffer, lastMatch, c); + } + } + } else { + for (var i = 0; i < ivl; i++) { + for (var pos = checkPosition + 1; pos < maskL; pos++) { + if (isMask(pos)) { + if (isValid(pos, inputValue.charAt(i), buffer) !== false) { + setBufferElement(buffer, pos, inputValue.charAt(i)); + lastMatch = checkPosition = pos; + } else { + SetReTargetPlaceHolder(buffer, pos); + if (isMask(i) && inputValue.charAt(i) == getPlaceHolder(i)) + checkPosition = pos; + } + break; + } else { //nonmask + SetReTargetPlaceHolder(buffer, pos); + if (lastMatch == checkPosition) //once outsync the nonmask cannot be the lastmatch + lastMatch = pos; + checkPosition = pos; + } + } + } + } + if (clearInvalid) { + writeBuffer(input, buffer); + } + return seekNext(buffer, lastMatch); + } + + function EscapeRegex(str) { + var specials = ['/', '.', '*', '+', '?', '|', '(', ')', '[', ']', '{', '}', '\\']; + return str.replace(new 
RegExp('(\\' + specials.join('|\\') + ')', 'gim'), '\\$1'); + } + function TruncateInput(input) { + return input.replace(new RegExp("(" + EscapeRegex(_buffer.join('')) + ")*$"), ""); + } + + + //functionality fn + function setvalue(el, value) { + _val.call(el, value); + el.triggerHandler('setvalue.inputmask'); + } + + function unmaskedvalue(el, skipDatepickerCheck) { + + if (tests && (skipDatepickerCheck === true || !el.hasClass('hasDatepicker'))) { + var buffer = _buffer.slice(); + checkVal(el, buffer); + return $.map(buffer, function(element, index) { + return isMask(index) && element != getBufferElement(_buffer.slice(), index) ? element : null; + }).join(''); + } + else { + return _val.call(el); + } + } + + function caret(input, begin, end) { + if (input.length == 0) return; + if (typeof begin == 'number') { + end = (typeof end == 'number') ? end : begin; + if (opts.insertMode == false && begin == end) end++; //set visualization for insert/overwrite mode + return input.each(function() { + if (this.setSelectionRange) { + this.focus(); + this.setSelectionRange(begin, end); + } else if (this.createTextRange) { + var range = this.createTextRange(); + range.collapse(true); + range.moveEnd('character', end); + range.moveStart('character', begin); + range.select(); + } + }); + } else { + if (input[0].setSelectionRange) { + begin = input[0].selectionStart; + end = input[0].selectionEnd; + } else if (document.selection && document.selection.createRange) { + var range = document.selection.createRange(); + begin = 0 - range.duplicate().moveStart('character', -100000); + end = begin + range.text.length; + } + return { begin: begin, end: end }; + } + }; + + function mask(el) { + var input = $(el); + //store tests & original buffer in the input element - used to get the unmasked value + input.data('inputmask', { + 'tests': tests, + '_buffer': _buffer, + 'greedy': opts.greedy, + 'repeat': opts.repeat, + 'autoUnmask': opts.autoUnmask, + 'definitions': opts.definitions + }); + + 
//init buffer + var buffer = _buffer.slice(); + var undoBuffer = _val.call(input); + var ignore = false; //Variable for ignoring control keys + var lastPosition = -1; + var firstMaskPos = seekNext(buffer, -1); + + //unbind all events - to make sure that no other mask will interfere when re-masking + input.unbind(".inputmask"); + input.removeClass('focus.inputmask'); + //bind events + if (!input.attr("readonly")) { + input.bind("mouseenter.inputmask", function() { + var input = $(this); + if (!input.hasClass('focus.inputmask') && _val.call(input).length == 0) { + buffer = _buffer.slice(); + writeBuffer(input, buffer); + } + }).bind("blur.inputmask", function() { + var input = $(this); + input.removeClass('focus.inputmask'); + if (_val.call(input) != undoBuffer) { + input.change(); + } + if (opts.clearMaskOnLostFocus && _val.call(input) == _buffer.join('')) + _val.call(input, ''); + if (opts.clearIncomplete && checkVal(input, buffer, true) != getMaskLength()) { + if (opts.clearMaskOnLostFocus) + _val.call(input, ''); + else { + buffer = _buffer.slice(); + writeBuffer(input, buffer); + } + } + }).bind("focus.inputmask", function() { + var input = $(this); + input.addClass('focus.inputmask'); + undoBuffer = _val.call(input); + }).bind("mouseleave.inputmask", function() { + var input = $(this); + if (opts.clearMaskOnLostFocus && !input.hasClass('focus.inputmask') && _val.call(input) == _buffer.join('')) + _val.call(input, ''); + }).bind("click.inputmask", function() { + var input = $(this); + setTimeout(function() { + var selectedCaret = caret(input); + if (selectedCaret.begin == selectedCaret.end) { + var clickPosition = selectedCaret.begin; + lastPosition = checkVal(input, buffer, false); + caret(input, clickPosition < lastPosition && (isValid(clickPosition, buffer[clickPosition], buffer) || !isMask(clickPosition)) ? 
clickPosition : lastPosition); + } + }, 0); + }).bind('dblclick.inputmask', function() { + var input = $(this); + setTimeout(function() { + caret(input, 0, lastPosition); + }, 0); + }).bind("keydown.inputmask", keydownEvent + ).bind("keypress.inputmask", keypressEvent + ).bind("keyup.inputmask", function(e) { + var input = $(this); + var k = e.keyCode; + if (k == opts.keyCode.TAB && input.hasClass('focus.inputmask') && _val.call(input).length == 0) { + buffer = _buffer.slice(); + writeBuffer(input, buffer); + if (!opts.numericInput) caret(input, 0); + } + }).bind(pasteEventName, function() { + var input = $(this); + setTimeout(function() { + caret(input, checkVal(input, buffer, true)); + }, 0); + }).bind('setvalue.inputmask', function() { + var input = $(this); + setTimeout(function() { + undoBuffer = _val.call(input); + checkVal(input, buffer, true); + if (_val.call(input) == _buffer.join('')) + _val.call(input, ''); + }, 0); + }); + } + + setTimeout(function() { + lastPosition = checkVal(input, buffer, true); + if (document.activeElement === input[0]) { //position the caret when in focus + input.addClass('focus.inputmask'); + caret(input, lastPosition); + } else if (opts.clearMaskOnLostFocus && _val.call(input) == _buffer.join('')) + _val.call(input, ''); + }, 0); + + //private functions + //shift chars to left from start to end and put c at end position if defined + function shiftL(start, end, c) { + while (!isMask(start) && start - 1 >= 0) start--; + for (var i = start; i <= end && i < getMaskLength(); i++) { + if (isMask(i)) { + SetReTargetPlaceHolder(buffer, i); + var j = seekNext(buffer, i); + var p = getBufferElement(buffer, j); + if (p != getPlaceHolder(j)) { + if (j < getMaskLength() && isValid(i, p, buffer) !== false) { + setBufferElement(buffer, i, getBufferElement(buffer, j)); + } else { + if (isMask(i)) + break; + } + } else if (c == undefined) break; + } else { + SetReTargetPlaceHolder(buffer, i); + } + } + if (c != undefined) + 
setBufferElement(buffer, seekPrevious(buffer, end), c); + + buffer = TruncateInput(buffer.join('')).split(''); + if (buffer.length == 0) buffer = _buffer.slice(); + + return start; //return the used start position + } + function shiftR(pos, c, full) { //full => behave like a push right ~ do not stop on placeholders + for (var i = pos; i < getMaskLength(); i++) { + if (isMask(i)) { + var t = getBufferElement(buffer, i); + setBufferElement(buffer, i, c); + if (t != getPlaceHolder(i)) { + var j = seekNext(buffer, i); + if (j < getMaskLength()) { + if (isValid(j, t, buffer) !== false) + c = t; + else { + if (isMask(j)) + break; + else c = t; + } + } else break; + } else if (full !== true) break; + } else + SetReTargetPlaceHolder(buffer, i); + } + }; + + function keydownEvent(e) { + var input = $(this); + var pos = caret(input); + var k = e.keyCode; + ignore = (k < 16 || (k > 16 && k < 32) || (k > 32 && k < 41)); + + //delete selection before proceeding + if ((pos.begin - pos.end) != 0 && (!ignore || k == opts.keyCode.BACKSPACE || k == opts.keyCode.DELETE)) + clearBuffer(buffer, pos.begin, pos.end); + + //backspace, delete, and escape get special treatment + if (k == opts.keyCode.BACKSPACE || k == opts.keyCode.DELETE || (iPhone && k == 127)) {//backspace/delete + var maskL = getMaskLength(); + if (pos.begin == 0 && pos.end == maskL) { + buffer = _buffer.slice(); + writeBuffer(input, buffer); + if (!opts.numericInput) caret(input, 0); + } else { + var beginPos = pos.begin - (k == opts.keyCode.DELETE || pos.begin < pos.end ? 0 : 1); + beginPos = shiftL(beginPos < 0 ? 
0 : beginPos, maskL); + if (opts.numericInput) { + shiftR(0, getPlaceHolder(0), true); + beginPos = seekNext(buffer, beginPos); + } + writeBuffer(input, buffer, beginPos); + if (!opts.insertMode && k == opts.keyCode.BACKSPACE) { + caret(input, seekPrevious(buffer, beginPos)); + } + } + if (opts.oncleared && _val.call(input) == _buffer.join('')) + opts.oncleared.call(input); + + return false; + } else if (k == opts.keyCode.END || k == opts.keyCode.PAGE_DOWN) { //when END or PAGE_DOWN pressed set position at lastmatch + setTimeout(function() { + var caretPos = checkVal(input, buffer, false); + if (!opts.insertMode && caretPos == getMaskLength() && !e.shiftKey) caretPos--; + caret(input, e.shiftKey ? pos.begin : caretPos, caretPos); + }, 0); + return false; + } else if (k == opts.keyCode.HOME || k == opts.keyCode.PAGE_UP) {//Home or page_up + caret(input, 0, e.shiftKey ? pos.begin : 0); + return false; + } + else if (k == opts.keyCode.ESCAPE) {//escape + _val.call(input, undoBuffer); + caret(input, 0, checkVal(input, buffer)); + return false; + } else if (k == opts.keyCode.INSERT) {//insert + opts.insertMode = !opts.insertMode; + caret(input, !opts.insertMode && pos.begin == getMaskLength() ? pos.begin - 1 : pos.begin); + return false; + } + else if (!opts.insertMode) { //overwritemode + if (k == opts.keyCode.RIGHT) {//right + var caretPos = pos.begin == pos.end ? pos.end + 1 : pos.end; + caretPos = caretPos < getMaskLength() ? caretPos : pos.end; + caret(input, e.shiftKey ? pos.begin : caretPos, e.shiftKey ? caretPos + 1 : caretPos); + return false; + } else if (k == opts.keyCode.LEFT) {//left + var caretPos = pos.begin - 1; + caretPos = caretPos > 0 ? caretPos : 0; + caret(input, caretPos, e.shiftKey ? pos.end : caretPos); + return false; + } + } + } + + function keypressEvent(e) { + var input = $(this); + if (ignore) { + ignore = false; + //Fixes Mac FF bug on backspace + return (e.keyCode == opts.keyCode.BACKSPACE) ? 
false : null; + } + e = e || window.event; + var k = e.charCode || e.keyCode || e.which; + var pos = caret($(this)); + if (e.ctrlKey || e.altKey || e.metaKey) {//Ignore + return true; + } else if ((k >= 32 && k <= 125) || k > 186) {//typeable characters + var c = String.fromCharCode(k); + if (opts.numericInput) { + var posEnd = opts.greedy ? pos.end : (pos.end + 1); + var p = seekPrevious(buffer, posEnd); + if (isValid(p, c, buffer)) { + if (isValid(firstMaskPos, buffer[firstMaskPos], buffer) == false || (opts.greedy === false && buffer.length < getMaskLength())) { + shiftL(firstMaskPos, posEnd, c); + writeBuffer(input, buffer, posEnd); + } else if (opts.oncomplete) + opts.oncomplete.call(input); + } + } + else { + var p = seekNext(buffer, pos.begin - 1); + if (isValid(p, c, buffer)) { + if (opts.insertMode == true) shiftR(p, c); else setBufferElement(buffer, p, c); + var next = seekNext(buffer, p); + writeBuffer(input, buffer, next); + + if (opts.oncomplete && next == getMaskLength()) + opts.oncomplete.call(input); + } + } + } + return false; + } + + + } + }; + } +})(jQuery); \ No newline at end of file diff --git a/src/dashboard/src/media/vendor/jquery.js b/src/dashboard/src/media/vendor/jquery.js new file mode 100644 index 0000000000..198b3ff07d --- /dev/null +++ b/src/dashboard/src/media/vendor/jquery.js @@ -0,0 +1,4 @@ +/*! 
jQuery v1.7.1 jquery.com | jquery.org/license */ +(function(a,b){function cy(a){return f.isWindow(a)?a:a.nodeType===9?a.defaultView||a.parentWindow:!1}function cv(a){if(!ck[a]){var b=c.body,d=f("<"+a+">").appendTo(b),e=d.css("display");d.remove();if(e==="none"||e===""){cl||(cl=c.createElement("iframe"),cl.frameBorder=cl.width=cl.height=0),b.appendChild(cl);if(!cm||!cl.createElement)cm=(cl.contentWindow||cl.contentDocument).document,cm.write((c.compatMode==="CSS1Compat"?"":"")+""),cm.close();d=cm.createElement(a),cm.body.appendChild(d),e=f.css(d,"display"),b.removeChild(cl)}ck[a]=e}return ck[a]}function cu(a,b){var c={};f.each(cq.concat.apply([],cq.slice(0,b)),function(){c[this]=a});return c}function ct(){cr=b}function cs(){setTimeout(ct,0);return cr=f.now()}function cj(){try{return new a.ActiveXObject("Microsoft.XMLHTTP")}catch(b){}}function ci(){try{return new a.XMLHttpRequest}catch(b){}}function cc(a,c){a.dataFilter&&(c=a.dataFilter(c,a.dataType));var d=a.dataTypes,e={},g,h,i=d.length,j,k=d[0],l,m,n,o,p;for(g=1;g0){if(c!=="border")for(;g=0===c})}function S(a){return!a||!a.parentNode||a.parentNode.nodeType===11}function K(){return!0}function J(){return!1}function n(a,b,c){var d=b+"defer",e=b+"queue",g=b+"mark",h=f._data(a,d);h&&(c==="queue"||!f._data(a,e))&&(c==="mark"||!f._data(a,g))&&setTimeout(function(){!f._data(a,e)&&!f._data(a,g)&&(f.removeData(a,d,!0),h.fire())},0)}function m(a){for(var b in a){if(b==="data"&&f.isEmptyObject(a[b]))continue;if(b!=="toJSON")return!1}return!0}function l(a,c,d){if(d===b&&a.nodeType===1){var e="data-"+c.replace(k,"-$1").toLowerCase();d=a.getAttribute(e);if(typeof d=="string"){try{d=d==="true"?!0:d==="false"?!1:d==="null"?null:f.isNumeric(d)?parseFloat(d):j.test(d)?f.parseJSON(d):d}catch(g){}f.data(a,c,d)}else d=b}return d}function h(a){var 
b=g[a]={},c,d;a=a.split(/\s+/);for(c=0,d=a.length;c)[^>]*$|#([\w\-]*)$)/,j=/\S/,k=/^\s+/,l=/\s+$/,m=/^<(\w+)\s*\/?>(?:<\/\1>)?$/,n=/^[\],:{}\s]*$/,o=/\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g,p=/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g,q=/(?:^|:|,)(?:\s*\[)+/g,r=/(webkit)[ \/]([\w.]+)/,s=/(opera)(?:.*version)?[ \/]([\w.]+)/,t=/(msie) ([\w.]+)/,u=/(mozilla)(?:.*? rv:([\w.]+))?/,v=/-([a-z]|[0-9])/ig,w=/^-ms-/,x=function(a,b){return(b+"").toUpperCase()},y=d.userAgent,z,A,B,C=Object.prototype.toString,D=Object.prototype.hasOwnProperty,E=Array.prototype.push,F=Array.prototype.slice,G=String.prototype.trim,H=Array.prototype.indexOf,I={};e.fn=e.prototype={constructor:e,init:function(a,d,f){var g,h,j,k;if(!a)return this;if(a.nodeType){this.context=this[0]=a,this.length=1;return this}if(a==="body"&&!d&&c.body){this.context=c,this[0]=c.body,this.selector=a,this.length=1;return this}if(typeof a=="string"){a.charAt(0)!=="<"||a.charAt(a.length-1)!==">"||a.length<3?g=i.exec(a):g=[null,a,null];if(g&&(g[1]||!d)){if(g[1]){d=d instanceof e?d[0]:d,k=d?d.ownerDocument||d:c,j=m.exec(a),j?e.isPlainObject(d)?(a=[c.createElement(j[1])],e.fn.attr.call(a,d,!0)):a=[k.createElement(j[1])]:(j=e.buildFragment([g[1]],[k]),a=(j.cacheable?e.clone(j.fragment):j.fragment).childNodes);return e.merge(this,a)}h=c.getElementById(g[2]);if(h&&h.parentNode){if(h.id!==g[2])return f.find(a);this.length=1,this[0]=h}this.context=c,this.selector=a;return this}return!d||d.jquery?(d||f).find(a):this.constructor(d).find(a)}if(e.isFunction(a))return f.ready(a);a.selector!==b&&(this.selector=a.selector,this.context=a.context);return e.makeArray(a,this)},selector:"",jquery:"1.7.1",length:0,size:function(){return this.length},toArray:function(){return F.call(this,0)},get:function(a){return a==null?this.toArray():a<0?this[this.length+a]:this[a]},pushStack:function(a,b,c){var 
d=this.constructor();e.isArray(a)?E.apply(d,a):e.merge(d,a),d.prevObject=this,d.context=this.context,b==="find"?d.selector=this.selector+(this.selector?" ":"")+c:b&&(d.selector=this.selector+"."+b+"("+c+")");return d},each:function(a,b){return e.each(this,a,b)},ready:function(a){e.bindReady(),A.add(a);return this},eq:function(a){a=+a;return a===-1?this.slice(a):this.slice(a,a+1)},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},slice:function(){return this.pushStack(F.apply(this,arguments),"slice",F.call(arguments).join(","))},map:function(a){return this.pushStack(e.map(this,function(b,c){return a.call(b,c,b)}))},end:function(){return this.prevObject||this.constructor(null)},push:E,sort:[].sort,splice:[].splice},e.fn.init.prototype=e.fn,e.extend=e.fn.extend=function(){var a,c,d,f,g,h,i=arguments[0]||{},j=1,k=arguments.length,l=!1;typeof i=="boolean"&&(l=i,i=arguments[1]||{},j=2),typeof i!="object"&&!e.isFunction(i)&&(i={}),k===j&&(i=this,--j);for(;j0)return;A.fireWith(c,[e]),e.fn.trigger&&e(c).trigger("ready").off("ready")}},bindReady:function(){if(!A){A=e.Callbacks("once memory");if(c.readyState==="complete")return setTimeout(e.ready,1);if(c.addEventListener)c.addEventListener("DOMContentLoaded",B,!1),a.addEventListener("load",e.ready,!1);else if(c.attachEvent){c.attachEvent("onreadystatechange",B),a.attachEvent("onload",e.ready);var b=!1;try{b=a.frameElement==null}catch(d){}c.documentElement.doScroll&&b&&J()}}},isFunction:function(a){return e.type(a)==="function"},isArray:Array.isArray||function(a){return e.type(a)==="array"},isWindow:function(a){return a&&typeof a=="object"&&"setInterval"in a},isNumeric:function(a){return!isNaN(parseFloat(a))&&isFinite(a)},type:function(a){return a==null?String(a):I[C.call(a)]||"object"},isPlainObject:function(a){if(!a||e.type(a)!=="object"||a.nodeType||e.isWindow(a))return!1;try{if(a.constructor&&!D.call(a,"constructor")&&!D.call(a.constructor.prototype,"isPrototypeOf"))return!1}catch(c){return!1}var 
d;for(d in a);return d===b||D.call(a,d)},isEmptyObject:function(a){for(var b in a)return!1;return!0},error:function(a){throw new Error(a)},parseJSON:function(b){if(typeof b!="string"||!b)return null;b=e.trim(b);if(a.JSON&&a.JSON.parse)return a.JSON.parse(b);if(n.test(b.replace(o,"@").replace(p,"]").replace(q,"")))return(new Function("return "+b))();e.error("Invalid JSON: "+b)},parseXML:function(c){var d,f;try{a.DOMParser?(f=new DOMParser,d=f.parseFromString(c,"text/xml")):(d=new ActiveXObject("Microsoft.XMLDOM"),d.async="false",d.loadXML(c))}catch(g){d=b}(!d||!d.documentElement||d.getElementsByTagName("parsererror").length)&&e.error("Invalid XML: "+c);return d},noop:function(){},globalEval:function(b){b&&j.test(b)&&(a.execScript||function(b){a.eval.call(a,b)})(b)},camelCase:function(a){return a.replace(w,"ms-").replace(v,x)},nodeName:function(a,b){return a.nodeName&&a.nodeName.toUpperCase()===b.toUpperCase()},each:function(a,c,d){var f,g=0,h=a.length,i=h===b||e.isFunction(a);if(d){if(i){for(f in a)if(c.apply(a[f],d)===!1)break}else for(;g0&&a[0]&&a[j-1]||j===0||e.isArray(a));if(k)for(;i1?i.call(arguments,0):b,j.notifyWith(k,e)}}function l(a){return function(c){b[a]=arguments.length>1?i.call(arguments,0):c,--g||j.resolveWith(j,b)}}var b=i.call(arguments,0),c=0,d=b.length,e=Array(d),g=d,h=d,j=d<=1&&a&&f.isFunction(a.promise)?a:f.Deferred(),k=j.promise();if(d>1){for(;c
    a",d=q.getElementsByTagName("*"),e=q.getElementsByTagName("a")[0];if(!d||!d.length||!e)return{};g=c.createElement("select"),h=g.appendChild(c.createElement("option")),i=q.getElementsByTagName("input")[0],b={leadingWhitespace:q.firstChild.nodeType===3,tbody:!q.getElementsByTagName("tbody").length,htmlSerialize:!!q.getElementsByTagName("link").length,style:/top/.test(e.getAttribute("style")),hrefNormalized:e.getAttribute("href")==="/a",opacity:/^0.55/.test(e.style.opacity),cssFloat:!!e.style.cssFloat,checkOn:i.value==="on",optSelected:h.selected,getSetAttribute:q.className!=="t",enctype:!!c.createElement("form").enctype,html5Clone:c.createElement("nav").cloneNode(!0).outerHTML!=="<:nav>",submitBubbles:!0,changeBubbles:!0,focusinBubbles:!1,deleteExpando:!0,noCloneEvent:!0,inlineBlockNeedsLayout:!1,shrinkWrapBlocks:!1,reliableMarginRight:!0},i.checked=!0,b.noCloneChecked=i.cloneNode(!0).checked,g.disabled=!0,b.optDisabled=!h.disabled;try{delete q.test}catch(s){b.deleteExpando=!1}!q.addEventListener&&q.attachEvent&&q.fireEvent&&(q.attachEvent("onclick",function(){b.noCloneEvent=!1}),q.cloneNode(!0).fireEvent("onclick")),i=c.createElement("input"),i.value="t",i.setAttribute("type","radio"),b.radioValue=i.value==="t",i.setAttribute("checked","checked"),q.appendChild(i),k=c.createDocumentFragment(),k.appendChild(q.lastChild),b.checkClone=k.cloneNode(!0).cloneNode(!0).lastChild.checked,b.appendChecked=i.checked,k.removeChild(i),k.appendChild(q),q.innerHTML="",a.getComputedStyle&&(j=c.createElement("div"),j.style.width="0",j.style.marginRight="0",q.style.width="2px",q.appendChild(j),b.reliableMarginRight=(parseInt((a.getComputedStyle(j,null)||{marginRight:0}).marginRight,10)||0)===0);if(q.attachEvent)for(o in{submit:1,change:1,focusin:1})n="on"+o,p=n in q,p||(q.setAttribute(n,"return;"),p=typeof q[n]=="function"),b[o+"Bubbles"]=p;k.removeChild(q),k=g=h=j=q=i=null,f(function(){var 
a,d,e,g,h,i,j,k,m,n,o,r=c.getElementsByTagName("body")[0];!r||(j=1,k="position:absolute;top:0;left:0;width:1px;height:1px;margin:0;",m="visibility:hidden;border:0;",n="style='"+k+"border:5px solid #000;padding:0;'",o="
    "+""+"
    ",a=c.createElement("div"),a.style.cssText=m+"width:0;height:0;position:static;top:0;margin-top:"+j+"px",r.insertBefore(a,r.firstChild),q=c.createElement("div"),a.appendChild(q),q.innerHTML="
    t
    ",l=q.getElementsByTagName("td"),p=l[0].offsetHeight===0,l[0].style.display="",l[1].style.display="none",b.reliableHiddenOffsets=p&&l[0].offsetHeight===0,q.innerHTML="",q.style.width=q.style.paddingLeft="1px",f.boxModel=b.boxModel=q.offsetWidth===2,typeof q.style.zoom!="undefined"&&(q.style.display="inline",q.style.zoom=1,b.inlineBlockNeedsLayout=q.offsetWidth===2,q.style.display="",q.innerHTML="
    ",b.shrinkWrapBlocks=q.offsetWidth!==2),q.style.cssText=k+m,q.innerHTML=o,d=q.firstChild,e=d.firstChild,h=d.nextSibling.firstChild.firstChild,i={doesNotAddBorder:e.offsetTop!==5,doesAddBorderForTableAndCells:h.offsetTop===5},e.style.position="fixed",e.style.top="20px",i.fixedPosition=e.offsetTop===20||e.offsetTop===15,e.style.position=e.style.top="",d.style.overflow="hidden",d.style.position="relative",i.subtractsBorderForOverflowNotVisible=e.offsetTop===-5,i.doesNotIncludeMarginInBodyOffset=r.offsetTop!==j,r.removeChild(a),q=a=null,f.extend(b,i))});return b}();var j=/^(?:\{.*\}|\[.*\])$/,k=/([A-Z])/g;f.extend({cache:{},uuid:0,expando:"jQuery"+(f.fn.jquery+Math.random()).replace(/\D/g,""),noData:{embed:!0,object:"clsid:D27CDB6E-AE6D-11cf-96B8-444553540000",applet:!0},hasData:function(a){a=a.nodeType?f.cache[a[f.expando]]:a[f.expando];return!!a&&!m(a)},data:function(a,c,d,e){if(!!f.acceptData(a)){var g,h,i,j=f.expando,k=typeof c=="string",l=a.nodeType,m=l?f.cache:a,n=l?a[j]:a[j]&&j,o=c==="events";if((!n||!m[n]||!o&&!e&&!m[n].data)&&k&&d===b)return;n||(l?a[j]=n=++f.uuid:n=j),m[n]||(m[n]={},l||(m[n].toJSON=f.noop));if(typeof c=="object"||typeof c=="function")e?m[n]=f.extend(m[n],c):m[n].data=f.extend(m[n].data,c);g=h=m[n],e||(h.data||(h.data={}),h=h.data),d!==b&&(h[f.camelCase(c)]=d);if(o&&!h[c])return g.events;k?(i=h[c],i==null&&(i=h[f.camelCase(c)])):i=h;return i}},removeData:function(a,b,c){if(!!f.acceptData(a)){var d,e,g,h=f.expando,i=a.nodeType,j=i?f.cache:a,k=i?a[h]:h;if(!j[k])return;if(b){d=c?j[k]:j[k].data;if(d){f.isArray(b)||(b in d?b=[b]:(b=f.camelCase(b),b in d?b=[b]:b=b.split(" ")));for(e=0,g=b.length;e-1)return!0;return!1},val:function(a){var c,d,e,g=this[0];{if(!!arguments.length){e=f.isFunction(a);return this.each(function(d){var g=f(this),h;if(this.nodeType===1){e?h=a.call(this,d,g.val()):h=a,h==null?h="":typeof h=="number"?h+="":f.isArray(h)&&(h=f.map(h,function(a){return 
a==null?"":a+""})),c=f.valHooks[this.nodeName.toLowerCase()]||f.valHooks[this.type];if(!c||!("set"in c)||c.set(this,h,"value")===b)this.value=h}})}if(g){c=f.valHooks[g.nodeName.toLowerCase()]||f.valHooks[g.type];if(c&&"get"in c&&(d=c.get(g,"value"))!==b)return d;d=g.value;return typeof d=="string"?d.replace(q,""):d==null?"":d}}}}),f.extend({valHooks:{option:{get:function(a){var b=a.attributes.value;return!b||b.specified?a.value:a.text}},select:{get:function(a){var b,c,d,e,g=a.selectedIndex,h=[],i=a.options,j=a.type==="select-one";if(g<0)return null;c=j?g:0,d=j?g+1:i.length;for(;c=0}),c.length||(a.selectedIndex=-1);return c}}},attrFn:{val:!0,css:!0,html:!0,text:!0,data:!0,width:!0,height:!0,offset:!0},attr:function(a,c,d,e){var g,h,i,j=a.nodeType;if(!!a&&j!==3&&j!==8&&j!==2){if(e&&c in f.attrFn)return f(a)[c](d);if(typeof a.getAttribute=="undefined")return f.prop(a,c,d);i=j!==1||!f.isXMLDoc(a),i&&(c=c.toLowerCase(),h=f.attrHooks[c]||(u.test(c)?x:w));if(d!==b){if(d===null){f.removeAttr(a,c);return}if(h&&"set"in h&&i&&(g=h.set(a,d,c))!==b)return g;a.setAttribute(c,""+d);return d}if(h&&"get"in h&&i&&(g=h.get(a,c))!==null)return g;g=a.getAttribute(c);return g===null?b:g}},removeAttr:function(a,b){var c,d,e,g,h=0;if(b&&a.nodeType===1){d=b.toLowerCase().split(p),g=d.length;for(;h=0}})});var z=/^(?:textarea|input|select)$/i,A=/^([^\.]*)?(?:\.(.+))?$/,B=/\bhover(\.\S+)?\b/,C=/^key/,D=/^(?:mouse|contextmenu)|click/,E=/^(?:focusinfocus|focusoutblur)$/,F=/^(\w*)(?:#([\w\-]+))?(?:\.([\w\-]+))?$/,G=function(a){var b=F.exec(a);b&&(b[1]=(b[1]||"").toLowerCase(),b[3]=b[3]&&new RegExp("(?:^|\\s)"+b[3]+"(?:\\s|$)"));return b},H=function(a,b){var c=a.attributes||{};return(!b[1]||a.nodeName.toLowerCase()===b[1])&&(!b[2]||(c.id||{}).value===b[2])&&(!b[3]||b[3].test((c["class"]||{}).value))},I=function(a){return f.event.special.hover?a:a.replace(B,"mouseenter$1 mouseleave$1")}; +f.event={add:function(a,c,d,e,g){var 
h,i,j,k,l,m,n,o,p,q,r,s;if(!(a.nodeType===3||a.nodeType===8||!c||!d||!(h=f._data(a)))){d.handler&&(p=d,d=p.handler),d.guid||(d.guid=f.guid++),j=h.events,j||(h.events=j={}),i=h.handle,i||(h.handle=i=function(a){return typeof f!="undefined"&&(!a||f.event.triggered!==a.type)?f.event.dispatch.apply(i.elem,arguments):b},i.elem=a),c=f.trim(I(c)).split(" ");for(k=0;k=0&&(h=h.slice(0,-1),k=!0),h.indexOf(".")>=0&&(i=h.split("."),h=i.shift(),i.sort());if((!e||f.event.customEvent[h])&&!f.event.global[h])return;c=typeof c=="object"?c[f.expando]?c:new f.Event(h,c):new f.Event(h),c.type=h,c.isTrigger=!0,c.exclusive=k,c.namespace=i.join("."),c.namespace_re=c.namespace?new RegExp("(^|\\.)"+i.join("\\.(?:.*\\.)?")+"(\\.|$)"):null,o=h.indexOf(":")<0?"on"+h:"";if(!e){j=f.cache;for(l in j)j[l].events&&j[l].events[h]&&f.event.trigger(c,d,j[l].handle.elem,!0);return}c.result=b,c.target||(c.target=e),d=d!=null?f.makeArray(d):[],d.unshift(c),p=f.event.special[h]||{};if(p.trigger&&p.trigger.apply(e,d)===!1)return;r=[[e,p.bindType||h]];if(!g&&!p.noBubble&&!f.isWindow(e)){s=p.delegateType||h,m=E.test(s+h)?e:e.parentNode,n=null;for(;m;m=m.parentNode)r.push([m,s]),n=m;n&&n===e.ownerDocument&&r.push([n.defaultView||n.parentWindow||a,s])}for(l=0;le&&i.push({elem:this,matches:d.slice(e)});for(j=0;j0?this.on(b,null,a,c):this.trigger(b)},f.attrFn&&(f.attrFn[b]=!0),C.test(b)&&(f.event.fixHooks[b]=f.event.keyHooks),D.test(b)&&(f.event.fixHooks[b]=f.event.mouseHooks)}),function(){function x(a,b,c,e,f,g){for(var h=0,i=e.length;h0){k=j;break}}j=j[a]}e[h]=k}}}function w(a,b,c,e,f,g){for(var h=0,i=e.length;h+~,(\[\\]+)+|[>+~])(\s*,\s*)?((?:.|\r|\n)*)/g,d="sizcache"+(Math.random()+"").replace(".",""),e=0,g=Object.prototype.toString,h=!1,i=!0,j=/\\/g,k=/\r\n/g,l=/\W/;[0,0].sort(function(){i=!1;return 0});var m=function(b,d,e,f){e=e||[],d=d||c;var h=d;if(d.nodeType!==1&&d.nodeType!==9)return[];if(!b||typeof b!="string")return e;var 
i,j,k,l,n,q,r,t,u=!0,v=m.isXML(d),w=[],x=b;do{a.exec(""),i=a.exec(x);if(i){x=i[3],w.push(i[1]);if(i[2]){l=i[3];break}}}while(i);if(w.length>1&&p.exec(b))if(w.length===2&&o.relative[w[0]])j=y(w[0]+w[1],d,f);else{j=o.relative[w[0]]?[d]:m(w.shift(),d);while(w.length)b=w.shift(),o.relative[b]&&(b+=w.shift()),j=y(b,j,f)}else{!f&&w.length>1&&d.nodeType===9&&!v&&o.match.ID.test(w[0])&&!o.match.ID.test(w[w.length-1])&&(n=m.find(w.shift(),d,v),d=n.expr?m.filter(n.expr,n.set)[0]:n.set[0]);if(d){n=f?{expr:w.pop(),set:s(f)}:m.find(w.pop(),w.length===1&&(w[0]==="~"||w[0]==="+")&&d.parentNode?d.parentNode:d,v),j=n.expr?m.filter(n.expr,n.set):n.set,w.length>0?k=s(j):u=!1;while(w.length)q=w.pop(),r=q,o.relative[q]?r=w.pop():q="",r==null&&(r=d),o.relative[q](k,r,v)}else k=w=[]}k||(k=j),k||m.error(q||b);if(g.call(k)==="[object Array]")if(!u)e.push.apply(e,k);else if(d&&d.nodeType===1)for(t=0;k[t]!=null;t++)k[t]&&(k[t]===!0||k[t].nodeType===1&&m.contains(d,k[t]))&&e.push(j[t]);else for(t=0;k[t]!=null;t++)k[t]&&k[t].nodeType===1&&e.push(j[t]);else s(k,e);l&&(m(l,h,e,f),m.uniqueSort(e));return e};m.uniqueSort=function(a){if(u){h=i,a.sort(u);if(h)for(var b=1;b0},m.find=function(a,b,c){var d,e,f,g,h,i;if(!a)return[];for(e=0,f=o.order.length;e":function(a,b){var c,d=typeof b=="string",e=0,f=a.length;if(d&&!l.test(b)){b=b.toLowerCase();for(;e=0)?c||d.push(h):c&&(b[g]=!1));return!1},ID:function(a){return a[1].replace(j,"")},TAG:function(a,b){return a[1].replace(j,"").toLowerCase()},CHILD:function(a){if(a[1]==="nth"){a[2]||m.error(a[0]),a[2]=a[2].replace(/^\+|\s*/g,"");var b=/(-?)(\d*)(?:n([+\-]?\d*))?/.exec(a[2]==="even"&&"2n"||a[2]==="odd"&&"2n+1"||!/\D/.test(a[2])&&"0n+"+a[2]||a[2]);a[2]=b[1]+(b[2]||1)-0,a[3]=b[3]-0}else a[2]&&m.error(a[0]);a[0]=e++;return a},ATTR:function(a,b,c,d,e,f){var g=a[1]=a[1].replace(j,"");!f&&o.attrMap[g]&&(a[1]=o.attrMap[g]),a[4]=(a[4]||a[5]||"").replace(j,""),a[2]==="~="&&(a[4]=" "+a[4]+" ");return 
a},PSEUDO:function(b,c,d,e,f){if(b[1]==="not")if((a.exec(b[3])||"").length>1||/^\w/.test(b[3]))b[3]=m(b[3],null,null,c);else{var g=m.filter(b[3],c,d,!0^f);d||e.push.apply(e,g);return!1}else if(o.match.POS.test(b[0])||o.match.CHILD.test(b[0]))return!0;return b},POS:function(a){a.unshift(!0);return a}},filters:{enabled:function(a){return a.disabled===!1&&a.type!=="hidden"},disabled:function(a){return a.disabled===!0},checked:function(a){return a.checked===!0},selected:function(a){a.parentNode&&a.parentNode.selectedIndex;return a.selected===!0},parent:function(a){return!!a.firstChild},empty:function(a){return!a.firstChild},has:function(a,b,c){return!!m(c[3],a).length},header:function(a){return/h\d/i.test(a.nodeName)},text:function(a){var b=a.getAttribute("type"),c=a.type;return a.nodeName.toLowerCase()==="input"&&"text"===c&&(b===c||b===null)},radio:function(a){return a.nodeName.toLowerCase()==="input"&&"radio"===a.type},checkbox:function(a){return a.nodeName.toLowerCase()==="input"&&"checkbox"===a.type},file:function(a){return a.nodeName.toLowerCase()==="input"&&"file"===a.type},password:function(a){return a.nodeName.toLowerCase()==="input"&&"password"===a.type},submit:function(a){var b=a.nodeName.toLowerCase();return(b==="input"||b==="button")&&"submit"===a.type},image:function(a){return a.nodeName.toLowerCase()==="input"&&"image"===a.type},reset:function(a){var b=a.nodeName.toLowerCase();return(b==="input"||b==="button")&&"reset"===a.type},button:function(a){var b=a.nodeName.toLowerCase();return b==="input"&&"button"===a.type||b==="button"},input:function(a){return/input|select|textarea|button/i.test(a.nodeName)},focus:function(a){return a===a.ownerDocument.activeElement}},setFilters:{first:function(a,b){return b===0},last:function(a,b,c,d){return b===d.length-1},even:function(a,b){return b%2===0},odd:function(a,b){return b%2===1},lt:function(a,b,c){return bc[3]-0},nth:function(a,b,c){return c[3]-0===b},eq:function(a,b,c){return 
c[3]-0===b}},filter:{PSEUDO:function(a,b,c,d){var e=b[1],f=o.filters[e];if(f)return f(a,c,b,d);if(e==="contains")return(a.textContent||a.innerText||n([a])||"").indexOf(b[3])>=0;if(e==="not"){var g=b[3];for(var h=0,i=g.length;h=0}},ID:function(a,b){return a.nodeType===1&&a.getAttribute("id")===b},TAG:function(a,b){return b==="*"&&a.nodeType===1||!!a.nodeName&&a.nodeName.toLowerCase()===b},CLASS:function(a,b){return(" "+(a.className||a.getAttribute("class"))+" ").indexOf(b)>-1},ATTR:function(a,b){var c=b[1],d=m.attr?m.attr(a,c):o.attrHandle[c]?o.attrHandle[c](a):a[c]!=null?a[c]:a.getAttribute(c),e=d+"",f=b[2],g=b[4];return d==null?f==="!=":!f&&m.attr?d!=null:f==="="?e===g:f==="*="?e.indexOf(g)>=0:f==="~="?(" "+e+" ").indexOf(g)>=0:g?f==="!="?e!==g:f==="^="?e.indexOf(g)===0:f==="$="?e.substr(e.length-g.length)===g:f==="|="?e===g||e.substr(0,g.length+1)===g+"-":!1:e&&d!==!1},POS:function(a,b,c,d){var e=b[2],f=o.setFilters[e];if(f)return f(a,c,b,d)}}},p=o.match.POS,q=function(a,b){return"\\"+(b-0+1)};for(var r in o.match)o.match[r]=new RegExp(o.match[r].source+/(?![^\[]*\])(?![^\(]*\))/.source),o.leftMatch[r]=new RegExp(/(^(?:.|\r|\n)*?)/.source+o.match[r].source.replace(/\\(\d+)/g,q));var s=function(a,b){a=Array.prototype.slice.call(a,0);if(b){b.push.apply(b,a);return b}return a};try{Array.prototype.slice.call(c.documentElement.childNodes,0)[0].nodeType}catch(t){s=function(a,b){var c=0,d=b||[];if(g.call(a)==="[object Array]")Array.prototype.push.apply(d,a);else if(typeof a.length=="number")for(var e=a.length;c",e.insertBefore(a,e.firstChild),c.getElementById(d)&&(o.find.ID=function(a,c,d){if(typeof c.getElementById!="undefined"&&!d){var e=c.getElementById(a[1]);return e?e.id===a[1]||typeof e.getAttributeNode!="undefined"&&e.getAttributeNode("id").nodeValue===a[1]?[e]:b:[]}},o.filter.ID=function(a,b){var c=typeof a.getAttributeNode!="undefined"&&a.getAttributeNode("id");return a.nodeType===1&&c&&c.nodeValue===b}),e.removeChild(a),e=a=null}(),function(){var 
a=c.createElement("div");a.appendChild(c.createComment("")),a.getElementsByTagName("*").length>0&&(o.find.TAG=function(a,b){var c=b.getElementsByTagName(a[1]);if(a[1]==="*"){var d=[];for(var e=0;c[e];e++)c[e].nodeType===1&&d.push(c[e]);c=d}return c}),a.innerHTML="",a.firstChild&&typeof a.firstChild.getAttribute!="undefined"&&a.firstChild.getAttribute("href")!=="#"&&(o.attrHandle.href=function(a){return a.getAttribute("href",2)}),a=null}(),c.querySelectorAll&&function(){var a=m,b=c.createElement("div"),d="__sizzle__";b.innerHTML="

    ";if(!b.querySelectorAll||b.querySelectorAll(".TEST").length!==0){m=function(b,e,f,g){e=e||c;if(!g&&!m.isXML(e)){var h=/^(\w+$)|^\.([\w\-]+$)|^#([\w\-]+$)/.exec(b);if(h&&(e.nodeType===1||e.nodeType===9)){if(h[1])return s(e.getElementsByTagName(b),f);if(h[2]&&o.find.CLASS&&e.getElementsByClassName)return s(e.getElementsByClassName(h[2]),f)}if(e.nodeType===9){if(b==="body"&&e.body)return s([e.body],f);if(h&&h[3]){var i=e.getElementById(h[3]);if(!i||!i.parentNode)return s([],f);if(i.id===h[3])return s([i],f)}try{return s(e.querySelectorAll(b),f)}catch(j){}}else if(e.nodeType===1&&e.nodeName.toLowerCase()!=="object"){var k=e,l=e.getAttribute("id"),n=l||d,p=e.parentNode,q=/^\s*[+~]/.test(b);l?n=n.replace(/'/g,"\\$&"):e.setAttribute("id",n),q&&p&&(e=e.parentNode);try{if(!q||p)return s(e.querySelectorAll("[id='"+n+"'] "+b),f)}catch(r){}finally{l||k.removeAttribute("id")}}}return a(b,e,f,g)};for(var e in a)m[e]=a[e];b=null}}(),function(){var a=c.documentElement,b=a.matchesSelector||a.mozMatchesSelector||a.webkitMatchesSelector||a.msMatchesSelector;if(b){var d=!b.call(c.createElement("div"),"div"),e=!1;try{b.call(c.documentElement,"[test!='']:sizzle")}catch(f){e=!0}m.matchesSelector=function(a,c){c=c.replace(/\=\s*([^'"\]]*)\s*\]/g,"='$1']");if(!m.isXML(a))try{if(e||!o.match.PSEUDO.test(c)&&!/!=/.test(c)){var f=b.call(a,c);if(f||!d||a.document&&a.document.nodeType!==11)return f}}catch(g){}return m(c,null,null,[a]).length>0}}}(),function(){var a=c.createElement("div");a.innerHTML="
    ";if(!!a.getElementsByClassName&&a.getElementsByClassName("e").length!==0){a.lastChild.className="e";if(a.getElementsByClassName("e").length===1)return;o.order.splice(1,0,"CLASS"),o.find.CLASS=function(a,b,c){if(typeof b.getElementsByClassName!="undefined"&&!c)return b.getElementsByClassName(a[1])},a=null}}(),c.documentElement.contains?m.contains=function(a,b){return a!==b&&(a.contains?a.contains(b):!0)}:c.documentElement.compareDocumentPosition?m.contains=function(a,b){return!!(a.compareDocumentPosition(b)&16)}:m.contains=function(){return!1},m.isXML=function(a){var b=(a?a.ownerDocument||a:0).documentElement;return b?b.nodeName!=="HTML":!1};var y=function(a,b,c){var d,e=[],f="",g=b.nodeType?[b]:b;while(d=o.match.PSEUDO.exec(a))f+=d[0],a=a.replace(o.match.PSEUDO,"");a=o.relative[a]?a+"*":a;for(var h=0,i=g.length;h0)for(h=g;h=0:f.filter(a,this).length>0:this.filter(a).length>0)},closest:function(a,b){var c=[],d,e,g=this[0];if(f.isArray(a)){var h=1;while(g&&g.ownerDocument&&g!==b){for(d=0;d-1:f.find.matchesSelector(g,a)){c.push(g);break}g=g.parentNode;if(!g||!g.ownerDocument||g===b||g.nodeType===11)break}}c=c.length>1?f.unique(c):c;return this.pushStack(c,"closest",a)},index:function(a){if(!a)return this[0]&&this[0].parentNode?this.prevAll().length:-1;if(typeof a=="string")return f.inArray(this[0],f(a));return f.inArray(a.jquery?a[0]:a,this)},add:function(a,b){var c=typeof a=="string"?f(a,b):f.makeArray(a&&a.nodeType?[a]:a),d=f.merge(this.get(),c);return this.pushStack(S(c[0])||S(d[0])?d:f.unique(d))},andSelf:function(){return this.add(this.prevObject)}}),f.each({parent:function(a){var b=a.parentNode;return b&&b.nodeType!==11?b:null},parents:function(a){return f.dir(a,"parentNode")},parentsUntil:function(a,b,c){return f.dir(a,"parentNode",c)},next:function(a){return f.nth(a,2,"nextSibling")},prev:function(a){return f.nth(a,2,"previousSibling")},nextAll:function(a){return f.dir(a,"nextSibling")},prevAll:function(a){return 
f.dir(a,"previousSibling")},nextUntil:function(a,b,c){return f.dir(a,"nextSibling",c)},prevUntil:function(a,b,c){return f.dir(a,"previousSibling",c)},siblings:function(a){return f.sibling(a.parentNode.firstChild,a)},children:function(a){return f.sibling(a.firstChild)},contents:function(a){return f.nodeName(a,"iframe")?a.contentDocument||a.contentWindow.document:f.makeArray(a.childNodes)}},function(a,b){f.fn[a]=function(c,d){var e=f.map(this,b,c);L.test(a)||(d=c),d&&typeof d=="string"&&(e=f.filter(d,e)),e=this.length>1&&!R[a]?f.unique(e):e,(this.length>1||N.test(d))&&M.test(a)&&(e=e.reverse());return this.pushStack(e,a,P.call(arguments).join(","))}}),f.extend({filter:function(a,b,c){c&&(a=":not("+a+")");return b.length===1?f.find.matchesSelector(b[0],a)?[b[0]]:[]:f.find.matches(a,b)},dir:function(a,c,d){var e=[],g=a[c];while(g&&g.nodeType!==9&&(d===b||g.nodeType!==1||!f(g).is(d)))g.nodeType===1&&e.push(g),g=g[c];return e},nth:function(a,b,c,d){b=b||1;var e=0;for(;a;a=a[c])if(a.nodeType===1&&++e===b)break;return a},sibling:function(a,b){var c=[];for(;a;a=a.nextSibling)a.nodeType===1&&a!==b&&c.push(a);return c}});var V="abbr|article|aside|audio|canvas|datalist|details|figcaption|figure|footer|header|hgroup|mark|meter|nav|output|progress|section|summary|time|video",W=/ jQuery\d+="(?:\d+|null)"/g,X=/^\s+/,Y=/<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/ig,Z=/<([\w:]+)/,$=/",""],legend:[1,"
    ","
    "],thead:[1,"","
    "],tr:[2,"","
    "],td:[3,"","
    "],col:[2,"","
    "],area:[1,"",""],_default:[0,"",""]},bh=U(c);bg.optgroup=bg.option,bg.tbody=bg.tfoot=bg.colgroup=bg.caption=bg.thead,bg.th=bg.td,f.support.htmlSerialize||(bg._default=[1,"div
    ","
    "]),f.fn.extend({text:function(a){if(f.isFunction(a))return this.each(function(b){var c=f(this);c.text(a.call(this,b,c.text()))});if(typeof a!="object"&&a!==b)return this.empty().append((this[0]&&this[0].ownerDocument||c).createTextNode(a));return f.text(this)},wrapAll:function(a){if(f.isFunction(a))return this.each(function(b){f(this).wrapAll(a.call(this,b))});if(this[0]){var b=f(a,this[0].ownerDocument).eq(0).clone(!0);this[0].parentNode&&b.insertBefore(this[0]),b.map(function(){var a=this;while(a.firstChild&&a.firstChild.nodeType===1)a=a.firstChild;return a}).append(this)}return this},wrapInner:function(a){if(f.isFunction(a))return this.each(function(b){f(this).wrapInner(a.call(this,b))});return this.each(function(){var b=f(this),c=b.contents();c.length?c.wrapAll(a):b.append(a)})},wrap:function(a){var b=f.isFunction(a);return this.each(function(c){f(this).wrapAll(b?a.call(this,c):a)})},unwrap:function(){return this.parent().each(function(){f.nodeName(this,"body")||f(this).replaceWith(this.childNodes)}).end()},append:function(){return this.domManip(arguments,!0,function(a){this.nodeType===1&&this.appendChild(a)})},prepend:function(){return this.domManip(arguments,!0,function(a){this.nodeType===1&&this.insertBefore(a,this.firstChild)})},before:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,!1,function(a){this.parentNode.insertBefore(a,this)});if(arguments.length){var a=f.clean(arguments);a.push.apply(a,this.toArray());return this.pushStack(a,"before",arguments)}},after:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,!1,function(a){this.parentNode.insertBefore(a,this.nextSibling)});if(arguments.length){var a=this.pushStack(this,"after",arguments);a.push.apply(a,f.clean(arguments));return a}},remove:function(a,b){for(var c=0,d;(d=this[c])!=null;c++)if(!a||f.filter(a,[d]).length)!b&&d.nodeType===1&&(f.cleanData(d.getElementsByTagName("*")),f.cleanData([d])),d.parentNode&&d.parentNode.removeChild(d);return 
this},empty:function() +{for(var a=0,b;(b=this[a])!=null;a++){b.nodeType===1&&f.cleanData(b.getElementsByTagName("*"));while(b.firstChild)b.removeChild(b.firstChild)}return this},clone:function(a,b){a=a==null?!1:a,b=b==null?a:b;return this.map(function(){return f.clone(this,a,b)})},html:function(a){if(a===b)return this[0]&&this[0].nodeType===1?this[0].innerHTML.replace(W,""):null;if(typeof a=="string"&&!ba.test(a)&&(f.support.leadingWhitespace||!X.test(a))&&!bg[(Z.exec(a)||["",""])[1].toLowerCase()]){a=a.replace(Y,"<$1>");try{for(var c=0,d=this.length;c1&&l0?this.clone(!0):this).get();f(e[h])[b](j),d=d.concat(j)}return this.pushStack(d,a,e.selector)}}),f.extend({clone:function(a,b,c){var d,e,g,h=f.support.html5Clone||!bc.test("<"+a.nodeName)?a.cloneNode(!0):bo(a);if((!f.support.noCloneEvent||!f.support.noCloneChecked)&&(a.nodeType===1||a.nodeType===11)&&!f.isXMLDoc(a)){bk(a,h),d=bl(a),e=bl(h);for(g=0;d[g];++g)e[g]&&bk(d[g],e[g])}if(b){bj(a,h);if(c){d=bl(a),e=bl(h);for(g=0;d[g];++g)bj(d[g],e[g])}}d=e=null;return h},clean:function(a,b,d,e){var g;b=b||c,typeof b.createElement=="undefined"&&(b=b.ownerDocument||b[0]&&b[0].ownerDocument||c);var h=[],i;for(var j=0,k;(k=a[j])!=null;j++){typeof k=="number"&&(k+="");if(!k)continue;if(typeof k=="string")if(!_.test(k))k=b.createTextNode(k);else{k=k.replace(Y,"<$1>");var l=(Z.exec(k)||["",""])[1].toLowerCase(),m=bg[l]||bg._default,n=m[0],o=b.createElement("div");b===c?bh.appendChild(o):U(b).appendChild(o),o.innerHTML=m[1]+k+m[2];while(n--)o=o.lastChild;if(!f.support.tbody){var p=$.test(k),q=l==="table"&&!p?o.firstChild&&o.firstChild.childNodes:m[1]===""&&!p?o.childNodes:[];for(i=q.length-1;i>=0;--i)f.nodeName(q[i],"tbody")&&!q[i].childNodes.length&&q[i].parentNode.removeChild(q[i])}!f.support.leadingWhitespace&&X.test(k)&&o.insertBefore(b.createTextNode(X.exec(k)[0]),o.firstChild),k=o.childNodes}var r;if(!f.support.appendChecked)if(k[0]&&typeof (r=k.length)=="number")for(i=0;i=0)return 
b+"px"}}}),f.support.opacity||(f.cssHooks.opacity={get:function(a,b){return br.test((b&&a.currentStyle?a.currentStyle.filter:a.style.filter)||"")?parseFloat(RegExp.$1)/100+"":b?"1":""},set:function(a,b){var c=a.style,d=a.currentStyle,e=f.isNumeric(b)?"alpha(opacity="+b*100+")":"",g=d&&d.filter||c.filter||"";c.zoom=1;if(b>=1&&f.trim(g.replace(bq,""))===""){c.removeAttribute("filter");if(d&&!d.filter)return}c.filter=bq.test(g)?g.replace(bq,e):g+" "+e}}),f(function(){f.support.reliableMarginRight||(f.cssHooks.marginRight={get:function(a,b){var c;f.swap(a,{display:"inline-block"},function(){b?c=bz(a,"margin-right","marginRight"):c=a.style.marginRight});return c}})}),c.defaultView&&c.defaultView.getComputedStyle&&(bA=function(a,b){var c,d,e;b=b.replace(bs,"-$1").toLowerCase(),(d=a.ownerDocument.defaultView)&&(e=d.getComputedStyle(a,null))&&(c=e.getPropertyValue(b),c===""&&!f.contains(a.ownerDocument.documentElement,a)&&(c=f.style(a,b)));return c}),c.documentElement.currentStyle&&(bB=function(a,b){var c,d,e,f=a.currentStyle&&a.currentStyle[b],g=a.style;f===null&&g&&(e=g[b])&&(f=e),!bt.test(f)&&bu.test(f)&&(c=g.left,d=a.runtimeStyle&&a.runtimeStyle.left,d&&(a.runtimeStyle.left=a.currentStyle.left),g.left=b==="fontSize"?"1em":f||0,f=g.pixelLeft+"px",g.left=c,d&&(a.runtimeStyle.left=d));return f===""?"auto":f}),bz=bA||bB,f.expr&&f.expr.filters&&(f.expr.filters.hidden=function(a){var b=a.offsetWidth,c=a.offsetHeight;return b===0&&c===0||!f.support.reliableHiddenOffsets&&(a.style&&a.style.display||f.css(a,"display"))==="none"},f.expr.filters.visible=function(a){return!f.expr.filters.hidden(a)});var bD=/%20/g,bE=/\[\]$/,bF=/\r?\n/g,bG=/#.*$/,bH=/^(.*?):[ 
\t]*([^\r\n]*)\r?$/mg,bI=/^(?:color|date|datetime|datetime-local|email|hidden|month|number|password|range|search|tel|text|time|url|week)$/i,bJ=/^(?:about|app|app\-storage|.+\-extension|file|res|widget):$/,bK=/^(?:GET|HEAD)$/,bL=/^\/\//,bM=/\?/,bN=/)<[^<]*)*<\/script>/gi,bO=/^(?:select|textarea)/i,bP=/\s+/,bQ=/([?&])_=[^&]*/,bR=/^([\w\+\.\-]+:)(?:\/\/([^\/?#:]*)(?::(\d+))?)?/,bS=f.fn.load,bT={},bU={},bV,bW,bX=["*/"]+["*"];try{bV=e.href}catch(bY){bV=c.createElement("a"),bV.href="",bV=bV.href}bW=bR.exec(bV.toLowerCase())||[],f.fn.extend({load:function(a,c,d){if(typeof a!="string"&&bS)return bS.apply(this,arguments);if(!this.length)return this;var e=a.indexOf(" ");if(e>=0){var g=a.slice(e,a.length);a=a.slice(0,e)}var h="GET";c&&(f.isFunction(c)?(d=c,c=b):typeof c=="object"&&(c=f.param(c,f.ajaxSettings.traditional),h="POST"));var i=this;f.ajax({url:a,type:h,dataType:"html",data:c,complete:function(a,b,c){c=a.responseText,a.isResolved()&&(a.done(function(a){c=a}),i.html(g?f("
    ").append(c.replace(bN,"")).find(g):c)),d&&i.each(d,[c,b,a])}});return this},serialize:function(){return f.param(this.serializeArray())},serializeArray:function(){return this.map(function(){return this.elements?f.makeArray(this.elements):this}).filter(function(){return this.name&&!this.disabled&&(this.checked||bO.test(this.nodeName)||bI.test(this.type))}).map(function(a,b){var c=f(this).val();return c==null?null:f.isArray(c)?f.map(c,function(a,c){return{name:b.name,value:a.replace(bF,"\r\n")}}):{name:b.name,value:c.replace(bF,"\r\n")}}).get()}}),f.each("ajaxStart ajaxStop ajaxComplete ajaxError ajaxSuccess ajaxSend".split(" "),function(a,b){f.fn[b]=function(a){return this.on(b,a)}}),f.each(["get","post"],function(a,c){f[c]=function(a,d,e,g){f.isFunction(d)&&(g=g||e,e=d,d=b);return f.ajax({type:c,url:a,data:d,success:e,dataType:g})}}),f.extend({getScript:function(a,c){return f.get(a,b,c,"script")},getJSON:function(a,b,c){return f.get(a,b,c,"json")},ajaxSetup:function(a,b){b?b_(a,f.ajaxSettings):(b=a,a=f.ajaxSettings),b_(a,b);return a},ajaxSettings:{url:bV,isLocal:bJ.test(bW[1]),global:!0,type:"GET",contentType:"application/x-www-form-urlencoded",processData:!0,async:!0,accepts:{xml:"application/xml, text/xml",html:"text/html",text:"text/plain",json:"application/json, text/javascript","*":bX},contents:{xml:/xml/,html:/html/,json:/json/},responseFields:{xml:"responseXML",text:"responseText"},converters:{"* text":a.String,"text html":!0,"text json":f.parseJSON,"text xml":f.parseXML},flatOptions:{context:!0,url:!0}},ajaxPrefilter:bZ(bT),ajaxTransport:bZ(bU),ajax:function(a,c){function w(a,c,l,m){if(s!==2){s=2,q&&clearTimeout(q),p=b,n=m||"",v.readyState=a>0?4:0;var o,r,u,w=c,x=l?cb(d,v,l):b,y,z;if(a>=200&&a<300||a===304){if(d.ifModified){if(y=v.getResponseHeader("Last-Modified"))f.lastModified[k]=y;if(z=v.getResponseHeader("Etag"))f.etag[k]=z}if(a===304)w="notmodified",o=!0;else 
try{r=cc(d,x),w="success",o=!0}catch(A){w="parsererror",u=A}}else{u=w;if(!w||a)w="error",a<0&&(a=0)}v.status=a,v.statusText=""+(c||w),o?h.resolveWith(e,[r,w,v]):h.rejectWith(e,[v,w,u]),v.statusCode(j),j=b,t&&g.trigger("ajax"+(o?"Success":"Error"),[v,d,o?r:u]),i.fireWith(e,[v,w]),t&&(g.trigger("ajaxComplete",[v,d]),--f.active||f.event.trigger("ajaxStop"))}}typeof a=="object"&&(c=a,a=b),c=c||{};var d=f.ajaxSetup({},c),e=d.context||d,g=e!==d&&(e.nodeType||e instanceof f)?f(e):f.event,h=f.Deferred(),i=f.Callbacks("once memory"),j=d.statusCode||{},k,l={},m={},n,o,p,q,r,s=0,t,u,v={readyState:0,setRequestHeader:function(a,b){if(!s){var c=a.toLowerCase();a=m[c]=m[c]||a,l[a]=b}return this},getAllResponseHeaders:function(){return s===2?n:null},getResponseHeader:function(a){var c;if(s===2){if(!o){o={};while(c=bH.exec(n))o[c[1].toLowerCase()]=c[2]}c=o[a.toLowerCase()]}return c===b?null:c},overrideMimeType:function(a){s||(d.mimeType=a);return this},abort:function(a){a=a||"abort",p&&p.abort(a),w(0,a);return this}};h.promise(v),v.success=v.done,v.error=v.fail,v.complete=i.add,v.statusCode=function(a){if(a){var b;if(s<2)for(b in a)j[b]=[j[b],a[b]];else b=a[v.status],v.then(b,b)}return this},d.url=((a||d.url)+"").replace(bG,"").replace(bL,bW[1]+"//"),d.dataTypes=f.trim(d.dataType||"*").toLowerCase().split(bP),d.crossDomain==null&&(r=bR.exec(d.url.toLowerCase()),d.crossDomain=!(!r||r[1]==bW[1]&&r[2]==bW[2]&&(r[3]||(r[1]==="http:"?80:443))==(bW[3]||(bW[1]==="http:"?80:443)))),d.data&&d.processData&&typeof d.data!="string"&&(d.data=f.param(d.data,d.traditional)),b$(bT,d,c,v);if(s===2)return!1;t=d.global,d.type=d.type.toUpperCase(),d.hasContent=!bK.test(d.type),t&&f.active++===0&&f.event.trigger("ajaxStart");if(!d.hasContent){d.data&&(d.url+=(bM.test(d.url)?"&":"?")+d.data,delete d.data),k=d.url;if(d.cache===!1){var 
x=f.now(),y=d.url.replace(bQ,"$1_="+x);d.url=y+(y===d.url?(bM.test(d.url)?"&":"?")+"_="+x:"")}}(d.data&&d.hasContent&&d.contentType!==!1||c.contentType)&&v.setRequestHeader("Content-Type",d.contentType),d.ifModified&&(k=k||d.url,f.lastModified[k]&&v.setRequestHeader("If-Modified-Since",f.lastModified[k]),f.etag[k]&&v.setRequestHeader("If-None-Match",f.etag[k])),v.setRequestHeader("Accept",d.dataTypes[0]&&d.accepts[d.dataTypes[0]]?d.accepts[d.dataTypes[0]]+(d.dataTypes[0]!=="*"?", "+bX+"; q=0.01":""):d.accepts["*"]);for(u in d.headers)v.setRequestHeader(u,d.headers[u]);if(d.beforeSend&&(d.beforeSend.call(e,v,d)===!1||s===2)){v.abort();return!1}for(u in{success:1,error:1,complete:1})v[u](d[u]);p=b$(bU,d,c,v);if(!p)w(-1,"No Transport");else{v.readyState=1,t&&g.trigger("ajaxSend",[v,d]),d.async&&d.timeout>0&&(q=setTimeout(function(){v.abort("timeout")},d.timeout));try{s=1,p.send(l,w)}catch(z){if(s<2)w(-1,z);else throw z}}return v},param:function(a,c){var d=[],e=function(a,b){b=f.isFunction(b)?b():b,d[d.length]=encodeURIComponent(a)+"="+encodeURIComponent(b)};c===b&&(c=f.ajaxSettings.traditional);if(f.isArray(a)||a.jquery&&!f.isPlainObject(a))f.each(a,function(){e(this.name,this.value)});else for(var g in a)ca(g,a[g],c,e);return d.join("&").replace(bD,"+")}}),f.extend({active:0,lastModified:{},etag:{}});var cd=f.now(),ce=/(\=)\?(&|$)|\?\?/i;f.ajaxSetup({jsonp:"callback",jsonpCallback:function(){return f.expando+"_"+cd++}}),f.ajaxPrefilter("json jsonp",function(b,c,d){var e=b.contentType==="application/x-www-form-urlencoded"&&typeof b.data=="string";if(b.dataTypes[0]==="jsonp"||b.jsonp!==!1&&(ce.test(b.url)||e&&ce.test(b.data))){var 
g,h=b.jsonpCallback=f.isFunction(b.jsonpCallback)?b.jsonpCallback():b.jsonpCallback,i=a[h],j=b.url,k=b.data,l="$1"+h+"$2";b.jsonp!==!1&&(j=j.replace(ce,l),b.url===j&&(e&&(k=k.replace(ce,l)),b.data===k&&(j+=(/\?/.test(j)?"&":"?")+b.jsonp+"="+h))),b.url=j,b.data=k,a[h]=function(a){g=[a]},d.always(function(){a[h]=i,g&&f.isFunction(i)&&a[h](g[0])}),b.converters["script json"]=function(){g||f.error(h+" was not called");return g[0]},b.dataTypes[0]="json";return"script"}}),f.ajaxSetup({accepts:{script:"text/javascript, application/javascript, application/ecmascript, application/x-ecmascript"},contents:{script:/javascript|ecmascript/},converters:{"text script":function(a){f.globalEval(a);return a}}}),f.ajaxPrefilter("script",function(a){a.cache===b&&(a.cache=!1),a.crossDomain&&(a.type="GET",a.global=!1)}),f.ajaxTransport("script",function(a){if(a.crossDomain){var d,e=c.head||c.getElementsByTagName("head")[0]||c.documentElement;return{send:function(f,g){d=c.createElement("script"),d.async="async",a.scriptCharset&&(d.charset=a.scriptCharset),d.src=a.url,d.onload=d.onreadystatechange=function(a,c){if(c||!d.readyState||/loaded|complete/.test(d.readyState))d.onload=d.onreadystatechange=null,e&&d.parentNode&&e.removeChild(d),d=b,c||g(200,"success")},e.insertBefore(d,e.firstChild)},abort:function(){d&&d.onload(0,1)}}}});var cf=a.ActiveXObject?function(){for(var a in ch)ch[a](0,1)}:!1,cg=0,ch;f.ajaxSettings.xhr=a.ActiveXObject?function(){return!this.isLocal&&ci()||cj()}:ci,function(a){f.extend(f.support,{ajax:!!a,cors:!!a&&"withCredentials"in a})}(f.ajaxSettings.xhr()),f.support.ajax&&f.ajaxTransport(function(c){if(!c.crossDomain||f.support.cors){var d;return{send:function(e,g){var h=c.xhr(),i,j;c.username?h.open(c.type,c.url,c.async,c.username,c.password):h.open(c.type,c.url,c.async);if(c.xhrFields)for(j in 
c.xhrFields)h[j]=c.xhrFields[j];c.mimeType&&h.overrideMimeType&&h.overrideMimeType(c.mimeType),!c.crossDomain&&!e["X-Requested-With"]&&(e["X-Requested-With"]="XMLHttpRequest");try{for(j in e)h.setRequestHeader(j,e[j])}catch(k){}h.send(c.hasContent&&c.data||null),d=function(a,e){var j,k,l,m,n;try{if(d&&(e||h.readyState===4)){d=b,i&&(h.onreadystatechange=f.noop,cf&&delete ch[i]);if(e)h.readyState!==4&&h.abort();else{j=h.status,l=h.getAllResponseHeaders(),m={},n=h.responseXML,n&&n.documentElement&&(m.xml=n),m.text=h.responseText;try{k=h.statusText}catch(o){k=""}!j&&c.isLocal&&!c.crossDomain?j=m.text?200:404:j===1223&&(j=204)}}}catch(p){e||g(-1,p)}m&&g(j,k,m,l)},!c.async||h.readyState===4?d():(i=++cg,cf&&(ch||(ch={},f(a).unload(cf)),ch[i]=d),h.onreadystatechange=d)},abort:function(){d&&d(0,1)}}}});var ck={},cl,cm,cn=/^(?:toggle|show|hide)$/,co=/^([+\-]=)?([\d+.\-]+)([a-z%]*)$/i,cp,cq=[["height","marginTop","marginBottom","paddingTop","paddingBottom"],["width","marginLeft","marginRight","paddingLeft","paddingRight"],["opacity"]],cr;f.fn.extend({show:function(a,b,c){var d,e;if(a||a===0)return this.animate(cu("show",3),a,b,c);for(var g=0,h=this.length;g=i.duration+this.startTime){this.now=this.end,this.pos=this.state=1,this.update(),i.animatedProperties[this.prop]=!0;for(b in i.animatedProperties)i.animatedProperties[b]!==!0&&(g=!1);if(g){i.overflow!=null&&!f.support.shrinkWrapBlocks&&f.each(["","X","Y"],function(a,b){h.style["overflow"+b]=i.overflow[a]}),i.hide&&f(h).hide();if(i.hide||i.show)for(b in i.animatedProperties)f.style(h,b,i.orig[b]),f.removeData(h,"fxshow"+b,!0),f.removeData(h,"toggle"+b,!0);d=i.complete,d&&(i.complete=!1,d.call(h))}return!1}i.duration==Infinity?this.now=e:(c=e-this.startTime,this.state=c/i.duration,this.pos=f.easing[i.animatedProperties[this.prop]](this.state,c,0,1,i.duration),this.now=this.start+(this.end-this.start)*this.pos),this.update();return!0}},f.extend(f.fx,{tick:function(){var 
a,b=f.timers,c=0;for(;c-1,k={},l={},m,n;j?(l=e.position(),m=l.top,n=l.left):(m=parseFloat(h)||0,n=parseFloat(i)||0),f.isFunction(b)&&(b=b.call(a,c,g)),b.top!=null&&(k.top=b.top-g.top+m),b.left!=null&&(k.left=b.left-g.left+n),"using"in b?b.using.call(a,k):e.css(k)}},f.fn.extend({position:function(){if(!this[0])return null;var a=this[0],b=this.offsetParent(),c=this.offset(),d=cx.test(b[0].nodeName)?{top:0,left:0}:b.offset();c.top-=parseFloat(f.css(a,"marginTop"))||0,c.left-=parseFloat(f.css(a,"marginLeft"))||0,d.top+=parseFloat(f.css(b[0],"borderTopWidth"))||0,d.left+=parseFloat(f.css(b[0],"borderLeftWidth"))||0;return{top:c.top-d.top,left:c.left-d.left}},offsetParent:function(){return this.map(function(){var a=this.offsetParent||c.body;while(a&&!cx.test(a.nodeName)&&f.css(a,"position")==="static")a=a.offsetParent;return a})}}),f.each(["Left","Top"],function(a,c){var d="scroll"+c;f.fn[d]=function(c){var e,g;if(c===b){e=this[0];if(!e)return null;g=cy(e);return g?"pageXOffset"in g?g[a?"pageYOffset":"pageXOffset"]:f.support.boxModel&&g.document.documentElement[d]||g.document.body[d]:e[d]}return this.each(function(){g=cy(this),g?g.scrollTo(a?f(g).scrollLeft():c,a?c:f(g).scrollTop()):this[d]=c})}}),f.each(["Height","Width"],function(a,c){var d=c.toLowerCase();f.fn["inner"+c]=function(){var a=this[0];return a?a.style?parseFloat(f.css(a,d,"padding")):this[d]():null},f.fn["outer"+c]=function(a){var b=this[0];return b?b.style?parseFloat(f.css(b,d,a?"margin":"border")):this[d]():null},f.fn[d]=function(a){var e=this[0];if(!e)return a==null?null:this;if(f.isFunction(a))return this.each(function(b){var c=f(this);c[d](a.call(this,b,c[d]()))});if(f.isWindow(e)){var g=e.document.documentElement["client"+c],h=e.document.body;return e.document.compatMode==="CSS1Compat"&&g||h&&h["client"+c]||g}if(e.nodeType===9)return Math.max(e.documentElement["client"+c],e.body["scroll"+c],e.documentElement["scroll"+c],e.body["offset"+c],e.documentElement["offset"+c]);if(a===b){var 
i=f.css(e,d),j=parseFloat(i);return f.isNumeric(j)?j:i}return this.css(d,typeof a=="string"?a:a+"px")}}),a.jQuery=a.$=f,typeof define=="function"&&define.amd&&define.amd.jQuery&&define("jquery",[],function(){return f})})(window); \ No newline at end of file diff --git a/src/dashboard/src/media/vendor/less.js/LICENSE b/src/dashboard/src/media/vendor/less.js/LICENSE new file mode 100644 index 0000000000..40f3b781b3 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/LICENSE @@ -0,0 +1,179 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ +END OF TERMS AND CONDITIONS + +Copyright (c) 2009-2010 Alexis Sellier diff --git a/src/dashboard/src/media/vendor/less.js/Makefile b/src/dashboard/src/media/vendor/less.js/Makefile new file mode 100644 index 0000000000..8baea4fa18 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/Makefile @@ -0,0 +1,69 @@ +# +# Run all tests +# +test: + node test/less-test.js + +# +# Run benchmark +# +benchmark: + node benchmark/less-benchmark.js + +# +# Build less.js +# +SRC = lib/less +HEADER = build/header.js +VERSION = `cat package.json | grep version \ + | grep -o '[0-9]\.[0-9]\.[0-9]\+'` +DIST = dist/less-${VERSION}.js +RHINO = dist/less-rhino-${VERSION}.js +DIST_MIN = dist/less-${VERSION}.min.js + +less: + @@mkdir -p dist + @@touch ${DIST} + @@cat ${HEADER} | sed s/@VERSION/${VERSION}/ > ${DIST} + @@echo "(function (window, undefined) {" >> ${DIST} + @@cat build/require.js\ + build/ecma-5.js\ + ${SRC}/parser.js\ + ${SRC}/functions.js\ + ${SRC}/tree/*.js\ + ${SRC}/tree.js\ + ${SRC}/browser.js >> ${DIST} + @@echo "})(window);" >> ${DIST} + @@echo ${DIST} built. + +rhino: + @@mkdir -p dist + @@touch ${RHINO} + @@cat build/require-rhino.js\ + build/ecma-5.js\ + ${SRC}/parser.js\ + ${SRC}/functions.js\ + ${SRC}/tree/*.js\ + ${SRC}/tree.js\ + ${SRC}/rhino.js > ${RHINO} + @@echo ${RHINO} built. + +min: less + @@echo minifying... 
+ @@cat ${HEADER} | sed s/@VERSION/${VERSION}/ > ${DIST_MIN} + @@uglifyjs ${DIST} >> ${DIST_MIN} + +clean: + git rm dist/* + +dist: clean min + git add dist/* + git commit -a -m "(dist) build ${VERSION}" + git archive master --prefix=less/ -o less-${VERSION}.tar.gz + npm publish less-${VERSION}.tar.gz + +stable: + npm tag less ${VERSION} stable + + +.PHONY: test benchmark diff --git a/src/dashboard/src/media/vendor/less.js/README.md b/src/dashboard/src/media/vendor/less.js/README.md new file mode 100644 index 0000000000..726d6910f8 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/README.md @@ -0,0 +1,20 @@ +less.js +======= + +The **dynamic** stylesheet language. + + + +about +----- + +This is the JavaScript, and now official, stable version of LESS. + +For more information, visit . + +license +------- + +See `LICENSE` file. + +> Copyright (c) 2009-2011 Alexis Sellier diff --git a/src/dashboard/src/media/vendor/less.js/benchmark/benchmark.less b/src/dashboard/src/media/vendor/less.js/benchmark/benchmark.less new file mode 100644 index 0000000000..6dd3d972f0 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/benchmark/benchmark.less @@ -0,0 +1,3979 @@ +@bg: #f01; +@white: #fff; +@grey: #eee; +@black: #000; +@blue: #000; +@accent_colour: #000; +@light_grey: #eee; +@dark_grey: #eee; +@yellow: #422; +@red: #ff0000; +@colour_positive: #ff0000; +@colour_negative: #ff0000; + +.box_shadow () { +} +.text_shadow () { +} +.border_radius () { +} +.border_radius_top_left () { +} +.border_radius_top_right () { +} +.border_radius_bottom_right () { +} +.border_radius_bottom_left () { +} +.border_radius_top () { +} +.border_radius_right () { +} +.border_radius_bottom () { +} +.border_radius_left () { +} +div.browse { + margin: 0 0 20px; + &.class { + padding: 0; + } + div.header { + padding: 10px 10px 9px; text-align: left; background: @bg url('/images/panel_header_bg.png') repeat-x top left; + border-bottom: 1px solid (@bg * 0.66 + @black * 0.33); line-height: 1; 
height: 18px; + .border_radius_top(3); color: @light_grey; + h3 { font-size: 16px; margin: 0; color: @white; .text_shadow(1, 1, 0, @bg * 0.66 + @black * 0.33); } + span.filter { + float: left; display: block; overflow: hidden; position: relative; z-index: 5; + a { + margin: 0 1px 0 0; display: block; float: left; padding: 0 8px; height: 18px; font-weight: bold; font-size: 10px; line-height: 18px; + text-transform: uppercase; background: url('/images/transparent_backgrounds/black_50.png'); color: @light_grey; text-decoration: none; position: relative; z-index: 3; + .active { + background: @white; color: @black; z-index: 4; + :hover { color: @black; } + } + :hover { color: @white; } + :first-child { .border_radius_left(2); } + :last-child { .border_radius_right(2); margin-right: 0; } + } + } + + span.filter.dropdown { + margin: 0; position: relative; overflow: visible; + a { + .border_radius(2); background: @white; color: @black; margin: 0; position: relative; padding-right: 25px; + img { float: left; margin: 4px 5px 0 0; } + b.arrow { + float: right; display: block; height: 0; width: 0; border: 5px solid transparent; border-top: 5px solid @black; border-bottom: none; + position: absolute; top: 6px; right: 10px; + } + :hover { + background: @accent_colour; color: @white; + b.arrow { border-top: 5px solid @white; } + } + } + ul { + position: absolute; top: 100%; left: 0; margin: 1px 0 0; padding: 0; background: @white; .border_radius(2); + .box_shadow(0, 1, 1, @black); + li { + list-style: none; display: block; padding: 0; margin: 0; + a { + display: block; height: 18px; line-height: 18px; color: @black; font-size: 10px; text-transform: uppercase; background: transparent; + border-bottom: 1px solid (@light_grey * 0.66 + @white * 0.33); float: none; margin: 0; .border_radius(0); white-space: nowrap; + :hover { background: url('/images/transparent_backgrounds/accent_colour_25.png'); color: @black; } + } + :last-child { + a { border: none; } + } + } + } + } + 
span.filter.dropdown.sort { float: left; margin: 0 0 0 10px; } + span.filter.dropdown.localisation { float: left; margin: 0 0 0 10px; } + a.more { + float: right; color: @white; .text_shadow(1, 1, 0, @bg * 0.66 + @black * 0.33); font-size: 14px; font-weight: bold; + position: relative; top: 2px; + :hover { text-decoration: none; } + } + } + > ul { + margin: 0; background: @white; padding: 10px 0 0 10px; .border_radius(3); position: relative; + li { + display: block; float: left; list-style: none; margin: 0 10px 10px 0; padding: 5px; position: relative; + background: @white; width: 130px; border: 1px solid (@light_grey * 0.33 + @white * 0.66); .border_radius(2); + a.remove { + position: absolute; height: 16px; width: 16px; padding: 3px; background: @accent_colour; + .border_radius(99); display: none; z-index: 3; top: -8px; right: -8px; + img { vertical-align: middle; } + } + div.thumbnail { + .border_radius_top(3); position: relative; z-index: 3; + .marker { + position: absolute; padding: 2px; .border_radius(2); z-index: 3; + background: url('/images/transparent_backgrounds/white_75.png'); height: 12px; width: 12px; + } + .marker.coupon { + height: auto; width: auto; top: 10px; right: -3px; padding: 0; background: transparent; overflow: hidden; position: absolute; + b { + display: block; height: 0; width: 0; float: left; border: 14px solid transparent; border-top: 14px solid @accent_colour; + border-bottom: none; border-right: none; float: left; + } + span { + color: @white; font-size: 10px; font-weight: bold; text-transform: uppercase; height: 14px; line-height: 14px; display: block; + padding: 0 4px 0 2px; background: @accent_colour; .text_shadow(1, 1, 0px, (@accent_colour * 0.75 + @black * 0.25)); margin: 0 0 0 14px; + } + } + .marker.video { + position: absolute; left: 50%; top: 50%; background: @white; width: 10px; height: 10px; + b { display: block; width: 0; height: 0; border: 5px solid transparent; border-left: 10px solid @black; border-right: none; } + } + 
.marker.endorsed_by_me { background: none; padding: 0; right: 0; bottom: -32px; .border_radius(2); background: @white; } + a.thumbnail { + display: block; overflow: hidden; position: relative; text-align: center; + img { position: relative; display: block; margin: auto; } + } + } + div.text { + margin: 3px 0 0; display: block; + a { text-decoration: none; } + a.title { + display: block; text-decoration: none; font-weight: bold; font-size: 12px; line-height: 16px; + white-space: nowrap; height: 16px; overflow: hidden; + :before { + display: block; height: 32px; width: 20px; content: " "; float: right; right: -15px; top: -8px; + background: @white; position: relative; z-index: 1; .box_shadow(-5, 0, 10, @white); + } + } + small { + font-size: 11px; line-height: 13px; color: @grey; display: block; height: 13px; overflow: hidden; white-space: nowrap; + a { font-weight: bold; } + :before { + display: block; height: 32px; width: 20px; content: " "; float: right; right: -15px; top: -8px; + background: @white; position: relative; z-index: 1; .box_shadow(-5, 0, 10, @white); + } + } + } + :hover { + background: @accent_colour; + a.remove { display: block; } + div.thumbnail { + a.marker.remove, a.marker.video { + b { display: inline-block; } + } + a.marker.video { .box_shadow(0, 0, 2, @black); } + } + div.text { + a { color: @white; } + a.title:before { background: @accent_colour; .box_shadow(-5, 0, 10, @accent_colour); } + small { + color: @white * 0.75 + @accent_colour * 0.25; + :before { background: @accent_colour; .box_shadow(-5, 0, 10, @accent_colour); } + } + } + div.footer a { color: @white; } + } + } + > li.ad div.thumbnail a.thumbnail { + width: 130px; height: 97px; + img { width: 100%; height: 100%; } + } + > li.brand div.thumbnail a.thumbnail { + width: 120px; height: 87px; padding: 5px; background: @white; .border_radius(2); + img { max-width: 120px; max-height: 87px; } + } + li.paginate { + margin-bottom: 0; + a { + display: block; position: relative; 
text-decoration: none; height: 131px; + div.arrow { + background: #81c153 url('/images/button_bg.png') repeat-x left top; border: 1px solid (@accent_colour * 0.75 + @black * 0.25); + height: 44px; .border_radius(99); width: 44px; margin: 0 auto; position: relative; top: 32px; + b { text-indent: -9000px; display: block; border: 10px solid transparent; width: 0; height: 0; position: relative; top: 12px; } + } + div.label { + position: absolute; bottom: 5px; left: 0; right: 0; line-height: 13px; + color: @accent_colour * 0.85 + @black * 0.15; text-decoration: none; + font-weight: bold; font-size: 12px; text-align: center; + } + :hover { + div.arrow { background: #abd56e url('/images/button_bg.png') repeat-x left -44px; } + } + } + :hover { background: transparent; } + } + li.paginate.previous a div b { border-right: 15px solid @white; border-left: none; left: 12px; } + li.paginate.next a div b { border-left: 15px solid @white; border-right: none; left: 16px; } + } + > div.footer { + padding: 9px 10px 10px; background: @light_grey * 0.75 + @white * 0.25; overflow: hidden; + border-top: 1px solid @light_grey; .border_radius_bottom(3); + div.info { + float: left; color: @grey; + strong { color: @black; font-weight: normal; } + } + div.pagination { + float: right; + > * { + display: inline-block; line-height: 1; padding: 0 6px; line-height: 18px; height: 18px; background: @white; + .border_radius(3); text-decoration: none; font-weight: bold; + font-size: 10px; text-transform: uppercase; + } + a { color: @grey; } + a:hover { color: @black; } + span.disabled { color: @light_grey; } + span.current { color: @white; background: @bg; border: none; } + span.current:hover { color: @white; } + } + } +} +div.browse.with_categories { margin: 0 0 0 160px; } +div.browse.with_options > ul { .border_radius_top(0); } +div.browse.with_footer > ul { .border_radius_bottom(0); } +/* Browse List */ +div.browse.list { +> ul { + margin: 0; min-height: 320px; + padding: 10px 0 0 10px; overflow: 
hidden; + > li { + display: block; list-style: none; margin: 0 10px 10px 0; padding: 5px; + .border_radius(3); position: relative; line-height: normal; + .marker { + position: absolute; padding: 2px; .border_radius(2); + background: url('/images/transparent_backgrounds/white_75.png'); + img { height: 12px; width: 12px; } + } + img.marker { height: 12px; width: 12px; } + span.marker.new { + color: black; left: -5px; top: -5px; background: none; background-color: @white * 0.1 + @yellow * 0.6 + @red * 0.3; line-height: 1; padding: 2px 5px; + font-weight: bold; + } + a.marker.media_type { + display: inline-block; text-decoration: none; top: 39px; left: 8px; + font-size: 10px; + b { font-weight: normal; margin: 0 0 0 2px; line-height: 1; display: none; } + img { vertical-align: middle; } + } + a.thumbnail { + float: left; + width: 68px; display: block; overflow: hidden; + border: 1px solid @light_grey; + :hover { border-color: @accent_colour; } + } + span.title_brand { + display: block; margin: 0 0 2px 75px; + a { margin: 0; display: inline; } + a.brand_name { font-weight: normal; font-size: 12px; } + } + a.ad_title { + font-weight: bold; font-size: 14px; margin: 0 0 0 75px; display: block; + } + a.brand_name { + font-weight: bold; font-size: 14px; margin: 0 0 0 75px; display: block; + } + small { + display: block; color: @grey; margin: 0 0 0 75px; font-size: 12px; + } + small.brand_name { display: inline; margin: 0; } + ul.chart { + margin: 0 0 0 80px; + height: 39px; + } + ul.networks { + margin: 3px 0 0 75px; padding: 0; overflow: hidden; + li { display: block; float: left; margin: 0 5px 0 0; line-height: 1; } + } + div.points { + display: none; + font-size: 12px; text-align: right; + label { color: @grey; } + } + a.remove { bottom: -3px; right: -3px; } + } + li.ad { + a.thumbnail { height: 51px; } + span.title_brand { + small.brand_name { + display: block; + } + } + } + li.brand { + a.thumbnail { height: 68px; } + } + } +} +div.browse.list.with_options ul { 
.border_radius_top(0); } +div.browse.list.with_footer ul { .border_radius_bottom(0); } +div.browse.list.cols_2 { + > ul { + > li { + width: 285px; float: left; + :hover { + background: @white; + } + } + } +} +div.browse.ads.list { + > ul { + > li { + height: 53px; + a.thumbnail { + height: 51px; + } + } + } +} +div.browse.brands.list { + > ul { + > li { + height: 68px; + a.thumbnail { + height: 66px; + } + } + } +} + +/* Categories List */ +#categories { + margin: 40px 0 0; width: 160px; float: left; position: relative; z-index: 1; + ul { + margin: 0; padding: 10px 0 0; + li { + list-style: none; margin: 0; padding: 0; font-size: 14px; + a { color: @grey; display: block; padding: 5px 10px 5px 15px; text-decoration: none; .border_radius_left(3); } + a:hover { color: @black; background: @light_grey * 0.15 + @white * 0.85; } + } + .all a { font-weight: bold; } + .current a { + background: @white; color: @black; border: 1px solid (@light_grey * 0.25 + @white * 0.75); border-right: none; border-left: 5px solid @bg; + padding-left: 10px; + } + } +} + +/* Ads > Show */ +#ad { + div.header { + overflow: hidden; + h3 { font-size: 16px; margin: 0 0 3px; } + small { + a.category { font-weight: bold; color: @accent_colour; } + span.networks img { position: relative; top: 3px; } + } + span.brand { + float: right; color: @white; + a.brand_name { font-weight: bold; color: @accent_colour; } + } + } + div.content { + padding: 0; position: relative; + a.toggle_size { + display: block; .border_radius(3); background-color: @black; padding: 0 5px 0 26px; + background-position: 5px center; background-repeat: no-repeat; text-decoration: none; margin: 5px 5px 0 0; + position: absolute; top: 0; right: 0; line-height: 25px; z-index: 45; + } + img.creative { margin: 0 auto; max-width: 540px; display: block; } + object { position: relative; z-index: 44; } + object.video { line-height: 0; font-size: 0; } + object embed { position: relative; z-index: 45; line-height: 0; font-size: 0; } + } + 
div.content.not_video { + padding: 40px; text-align: center; + * { margin-left: auto; margin-right: auto; } + object.flash { margin-bottom: 0; } + } + div.footer { + padding: 0; + div.vote_views { + padding: 5px 10px; overflow: hidden; + div.share { float: right; margin: 2px 0 0 0; } + #login_register_msg, #encourage_vote_msg { line-height: 22px; font-weight: bold; color: @black; } + } + } +} +#sidebar { + #meta { + table { + margin: 0; + tr:last-child td { padding-bottom: 0; } + td { + padding: 0 0 5px; + ul.networks { + margin: 0; padding: 0; + li { + list-style: none; display: inline; + } + li { + } + } + } + td.label { color: @grey; white-space: nowrap; width: 1%; text-align: right; padding-right: 5px; } + } + } +} + +/* Voting */ +div.voted { + font-size: 12px; line-height: 22px; color: @black; display: inline-block; font-weight: bold; + img { float: left; margin-right: 5px; padding: 3px; .border_radius(3); } +} +#voted_up { + img { background: @colour_positive * 0.66 + @bg * 0.15; } +} +#voted_down { + img { background: @colour_negative * 0.66 + @bg * 0.15; } +} +#encourage_comment { + display: inline-block; line-height: 22px; font-weight: bold; +} +#vote { + overflow: hidden; font-size: 12px; line-height: 22px; color: @black; float: left; + a { + color: @white; font-weight: bold; overflow: hidden; display: block; + width: 16px; text-decoration: none; text-align: center; font-size: 10px; padding: 3px; text-transform: uppercase; + } + a.up { + float: left; background: @colour_positive * 0.66 + @bg * 0.15; .border_radius_left(3); + :hover { background: @colour_positive * 0.85 + @bg * 0.15; } + } + a.down { + float: left; background: @colour_negative * 0.66 + @bg * 0.15; .border_radius_right(3); + margin: 0 5px 0 1px; + :hover { background: @colour_negative * 0.85 + @bg * 0.15; } + } +} +#vote.disabled { + a.up { + background: (@colour_positive * 0.66 + @bg * 0.15) * 0.15 + @grey * 0.85; + :hover { background: (@colour_positive * 0.85 + @bg * 0.15) * 0.25 + 
@grey * 0.75; } + } + a.down { + background: (@colour_negative * 0.66 + @bg * 0.15) * 0.15 + @grey * 0.85; + :hover { background: (@colour_negative * 0.85 + @bg * 0.15) * 0.25 + @grey * 0.75; } + } +} + +/* Panels */ +div.panel { + margin: 0 0 20px; position: relative; .box_shadow(0, 0, 3, @light_grey * 0.66 + @white * 0.33); .border_radius(3); + > div.header { + background: @bg url('/images/panel_header_bg.png') repeat-x top left; border-bottom: 1px solid (@bg * 0.66 + @black * 0.33); + padding: 5px 10px 4px; .border_radius_top(3); min-height: 18px; + h2 { font-size: 16px; margin: 0; color: @white; .text_shadow(1, 1, 0, @bg * 0.66 + @black * 0.33); } + h3 { color: @white; font-size: 14px; margin: 0; line-height: 18px; .text_shadow(1, 1, 0, @bg * 0.66 + @black * 0.33); } + small { display: block; font-size: 12px; color: @light_grey * 0.25 + @white * 0.75; } + span.filter { + float: left; display: block; overflow: hidden; position: relative; z-index: 5; + a { + margin: 0 1px 0 0; display: block; float: left; padding: 0 8px; height: 18px; font-weight: bold; font-size: 10px; line-height: 18px; + text-transform: uppercase; background: url('/images/transparent_backgrounds/black_50.png'); color: @light_grey; text-decoration: none; position: relative; z-index: 3; + } + a:first-child { .border_radius_left(2); } + a:last-child { .border_radius_right(2); margin-right: 0; } + a.active { background: @white; color: @black; z-index: 4; } + a:hover { color: @white; } + a.active:hover { color: @black; } + } + + span.filter.dropdown { + margin: 0; position: relative; overflow: visible; + a { + .border_radius(2); background: @white; color: @black; margin: 0; position: relative; padding-right: 25px; + img { float: left; margin: 4px 5px 0 0; } + b.arrow { + float: right; display: block; height: 0; width: 0; border: 5px solid transparent; border-top: 5px solid @black; border-bottom: none; + position: absolute; top: 6px; right: 10px; + } + :hover { + background: @accent_colour; color: 
@white; + b.arrow { border-top: 5px solid @white; } + } + } + + ul { + position: absolute; top: 100%; left: 0; margin: 1px 0 0; padding: 0; background: @white; .border_radius(2); + .box_shadow(0, 1, 1, @black); + li { + list-style: none; display: block; padding: 0; margin: 0; + a { + display: block; height: 18px; line-height: 18px; color: @black; font-size: 10px; text-transform: uppercase; background: transparent; + border-bottom: 1px solid (@light_grey * 0.66 + @white * 0.33); float: none; margin: 0; .border_radius(0); white-space: nowrap; + :hover { background: url('/images/transparent_backgrounds/accent_colour_25.png'); color: @black; } + } + } + li:last-child { + a { border: none; } + } + } + } + span.filter.dropdown.sort { float: left; margin: 0 0 0 10px; } + span.filter.dropdown.localisation { float: left; margin: 0 0 0 10px; } + + a.more { + float: right; color: @white; .text_shadow(1, 1, 0, @bg * 0.66 + @black * 0.33); font-size: 14px; font-weight: bold; + position: relative; top: 2px; + :hover { text-decoration: none; } + } + } + > div.content { + background: @white; padding: 10px; + .no_padding { padding: 0; } + } + > div.footer { + background: @light_grey * 0.33 + @white * 0.66; border-top: 1px solid (@light_grey * 0.5 + @white * 0.5); + padding: 4px 10px 5px; .border_radius_bottom(3); + } +} +div.panel.no_footer div.content { .border_radius_bottom(3); } +div.panel.no_header div.content { .border_radius_top(3); } +div.panel.collapsable { + div.header { + cursor: pointer; + b.toggle { float: right; border: 5px solid transparent; border-bottom: 5px solid @white; border-top: none; display: block; width: 0; height: 0; margin: 6px 0 0 0; } + } + div.header:hover { + background-color: @bg * 0.75 + @white * 0.25; + } +} +div.panel.collapsed { + div.header { + border-bottom: none; .border_radius(3); + b.toggle { border-bottom: none; border-top: 5px solid @white; } + } + div.blank { border-bottom: none; .border_radius_bottom(3); } + div.content, div.footer { 
display: none; } +} + + +/* Sidebar Actions */ +#sidebar { + #actions { + .box_shadow(0, 0, 0, transparent); + div.content { + background: url('/images/transparent_backgrounds/accent_colour_10.png'); text-align: center; + p.endorsement { + margin: 0 0 10px; font-size: 14px; font-weight: bold; + small { font-weight: normal; line-height: inherit; margin: 10px 0 0; } + :last-child { margin: 0; } + } + div.share { margin: 5px 0 0; } + a.button { + font-size: 16px; line-height: normal; height: auto; padding: 5px 10px 5px 35px; font-weight: bold; margin: 0; position: relative; + img { position: absolute; top: 3px; left: 6px; } + } + div.flash.notice { + margin: 10px 0 0; font-size: 22px; + small { font-weight: normal; margin: 0 0 10px; } + } + div.flash.notice.done { margin: 0; } + small { + display: block; margin: 10px 0 0; font-size: 11px; color: #808080; line-height: 12px; + img.favicon { vertical-align: middle; } + } + div.blank { + border: none; background: none; padding: 10px 0 0; border-top: 1px solid (@accent_colour * 0.5 + @white * 0.5); + margin: 10px 0 0; + } + } + } +} + +/* People Lists */ +ul.people { + margin: 0; padding: 10px 0 0 10px; background: @white; + > li { + display: block; margin: 0 10px 10px 0; float: left; padding: 2px; width: 57px; position: relative; + .border_radius(2); background: @white; list-style: none; border: 1px solid (@light_grey * 0.33 + @white * 0.66); + a.avatar { + display: block; width: 59px; height: 59px; overflow: hidden; + img { width: 100%; height: 100%; } + } + a.name { display: block; font-size: 10px; text-align: center; } + :hover { + background: @accent_colour; + a.name { color: @white; } + } + } +} +ul.people.list { + padding: 0; + > li { + margin: 0 0 10px; padding: 0 0 10px; overflow: hidden; float: none; width: auto; .border_radius(0); + border: none; border-bottom: 1px solid (@light_grey * 0.33 + @white * 0.66); + span.points { + float: right; display: block; padding: 5px; background: @light_grey * 0.15 + @white * 
0.85; line-height: 1; + text-align: center; width: 50px; height: 30px; .border_radius(3); margin: 0 0 0 10px; + strong { display: block; color: @black; font-size: 16px; margin: 2px 0 0; } + label { color: @grey; text-transform: uppercase; font-size: 10px; } + label.long { display: block; } + label.short { display: none; } + } + a.avatar { float: left; width: 40px; height: 40px; } + a.name { font-size: 14px; font-weight: bold; margin: 0 0 0 50px; text-align: left; } + a.name.long { display: inline; } + a.name.short { display: none; } + span.networks { + display: block; margin: 0 0 0 50px; + img.favicon { vertical-align: middle; } + } + :hover { + background: transparent; + a.name { color: @accent_colour * 0.85 + @black * 0.15; } + } + :last-child { padding-bottom: 0; border-bottom: none; margin-bottom: 0; } + } +} +ul.people.list.small { + > li { + span.points { + padding: 3px 6px; height: 18px; font-size: 9px; line-height: 17px; width: 60px; + strong { font-size: 12px; margin: 0; display: inline; } + label { font-size: 9px; } + label.long { display: none; } + label.short { display: inline; } + } + a.avatar { width: 24px; height: 24px; } + a.name { display: inline; line-height: 24px; margin: 0 0 0 5px; font-size: 12px; height: 24px; } + a.name.long { display: none; } + a.name.short { display: inline; } + span.networks { display: inline; margin: 0; } + :last-child { padding-bottom: 0; border-bottom: none; margin-bottom: 0; } + } +} +ul.people.tiled { + > li { + width: 28px; padding: 2px; + a.avatar { width: 24px; height: 24px; background: @white; padding: 2px; } + a.name, small, span.networks, span.points { display: none; } + } +} + +/* Comments */ +#comments { + ul { + margin: 0 0 20px; padding: 0; + li { + display: block; list-style: none; padding: 0; margin: 0 0 10px; + span.meta { + margin: 0; overflow: hidden; display: block; + small { font-size: 12px; color: @light_grey; float: right; line-height: 16px; display: inline-block; } + a.avatar { + display: 
inline-block; height: 16px; width: 16px; position: relative; top: 3px; + img { height: 100%; width: 100%; } + } + a.name { font-weight: bold; line-height: 16px; display: inline-block; } + span.inactive { color: @grey; font-weight: bold; line-height: 16px; display: inline-block; } + } + b.tail { + display: block; width: 0; height: 0; margin: 3px 0 0 10px; border: 5px solid transparent; border-top: none; + border-bottom: 5px solid @white; position: relative; z-index: 2; + } + blockquote { + margin: 0; padding: 10px; .border_radius(3); font-style: normal; background: @white; + color: @dark_grey; .box_shadow(0, 0, 3, @light_grey * 0.66 + @white * 0.33); + } + } + } + form { + margin: 0; + textarea { width: 500px; } + } +} + +/* Sidebar Categories */ +#sidebar { + #categories { + margin: 0 0 20px; + width: auto; + p { margin: 0; } + } +} + +#sidebar { + #ads > ul li, #recommendations > ul li { + width: 81px; + div.thumbnail { + a.thumbnail { height: 60px; width: 81px; } + } + div.text { + a.title { font-size: 11px; height: 14px; line-height: 14px; } + small { display: none; } + } + } + #brands > ul li { + width: 55px; + div.thumbnail { + a.thumbnail { + height: 45px; width: 45px; + img { max-height: 45px; max-width: 45px; } + } + } + div.text { display: none; } + } +} + +/* My Account */ +#accounts_controller { + #top { + #page_title { + #page_options { + a.button.public_profile { + float: right; font-size: 16px; line-height: 1; height: auto; padding: 8px 35px 8px 15px; position: relative; + b.arrow { display: block; height: 0; width: 0; position: absolute; top: 10px; right: 15px; border: 6px solid transparent; border-right: none; border-left: 6px solid @white; margin: 0; } + } + a.button.goto_dashboard { + float: right; font-size: 16px; line-height: 1; height: auto; padding: 8px 15px 8px 35px; margin-right: 5px; position: relative; + b.arrow { display: block; height: 0; width: 0; position: absolute; top: 10px; left: 15px; border: 6px solid transparent; border-left: 
none; border-right: 6px solid @white; margin: 0; }
      }
    }
  }
  /* Left-hand account navigation: arrow tab pulled out of the pill on hover/current */
  #account_nav {
    float: left; width: 200px; margin: 0 20px 0 0;
    ul.nav {
      margin: 0; padding: 0;
      li {
        margin: 0 0 5px; display: block; list-style: none; padding: 0;
        a {
          display: block; height: 30px; text-decoration: none; color: @white;
          b {
            border: 15px solid transparent; border-right: none; border-left: 10px solid transparent; width: 0;
            height: 0; float: right; display: none;
          }
          span {
            .border_radius(3); background: @bg; display: block;
            line-height: 30px; padding: 0 10px; font-size: 14px; font-weight: bold; margin: 0 10px 0 0;
          }
        }
        :hover {
          a {
            color: @white;
            b { border-left-color: @bg; display: block; }
            span { background: @bg; .border_radius_right(0); }
          }
        }
      }
      li.current a {
        b { border-left-color: @accent_colour; display: block; }
        span { background: @accent_colour; color: @white; .border_radius_right(0); }
      }
    }
  }
  #main {
    > div {
      margin: 0 0 20px;
      form { margin: 0; }
    }
    /* Profile tab: avatar with "change" badge on the left, form indented beside it */
    #profile {
      a.avatar {
        float: left; display: block;
        width: 70px; overflow: hidden; position: relative; text-decoration: none;
        img { width: 100%; }
        span {
          display: block; line-height: 1; padding: 3px; margin: 5px 0 0; color: @white; background: @accent_colour;
          .border_radius(3); .text_shadow(1, 1, 0, @grey);
          text-align: center; font-size: 10px; font-weight: bold; text-transform: uppercase;
        }
      }
      form {
        margin: 0 0 0 90px;
        h4 { margin: 10px 0 20px; border-bottom: 1px solid (@light_grey * 0.5 + @white * 0.5); padding: 0; color: @bg; font-size: 16px; }
        ul.choices {
          li { width: 30%; }
        }
        div.extra { margin-top: 20px; }
      }
    }

    /* Networks tab: floated tiles, one per social network */
    #networks {
      ul {
        margin: 0 -10px -10px 0; padding: 0; overflow: hidden;
        /* FIX(review): this rule carried the tile's base layout but was selected as
           `li:hover`, so non-hovered items rendered with no layout at all; hover
           styling is already provided by the separate `li:hover` rule below. */
        li {
          background: @light_grey; display: block; float: left; width: 180px;
          padding: 10px; margin: 0 10px 10px 0; list-style: none; .border_radius(3);
          position: relative;
          * { line-height: normal; }
          img { vertical-align: middle;
float: left; }
          .name { font-weight: bold; font-size: 14px; display: block; margin: -2px 0 0 42px; }
          small {
            font-size: 12px; color: @grey; display: block; margin-left: 42px;
            strong { color: @black; font-weight: normal; }
          }
          /* NOTE(review): removed an empty `:hover { }` rule here — it produced no
             output; hover styling is handled by the `li:hover` rule below. */
        }
        /* Connected network: white tile with accent border; 8px padding keeps the
           overall box size equal to the plain 10px-padding/no-border tile. */
        li.installed {
          background: @white;
          border: 2px solid @accent_colour; padding: 8px;
        }
        li.unavailable {
          .name { color: @black; }
          :hover {
            background: @light_grey;
          }
        }
        li:hover {
          background: @light_grey * 0.5 + @white * 0.5;
        }
      }
    }
  }
}

/* Shopping Style Panel */
#shopping_style {
  div.header a.button.small { float: right; }
  div.content {
    p {
      margin: 0 0 10px;
      label { text-transform: uppercase; font-size: 11px; display: block; color: @bg; font-weight: bold; }
      span { color: @black; }
      span.toggle { white-space: nowrap; color: @grey; }
      :last-child { margin: 0; }
    }
    p.more { text-align: left; font-weight: normal; }
    p.less { display: none; margin: 0; }
  }
}

/* People Controller */
#people_controller.index {
  #main {
    div.panel {
      float: left; width: 300px; margin: 0 20px 0 0;
      :last-child { margin-right: 0; }
    }
  }
}
#people_controller.show {
  /* NOTE(review): removed an empty
     `#person_overview, #shopping_style { a.button.small { } }` rule here — it
     emitted no CSS. */
  #content {
    #shopping_style {
      float: left; width: 240px; margin: 0 20px 0 0;
    }
    #main { width: 360px; }
  }
}

/* Search Results */
#search_results {
  margin: 0 0 20px;
  li {
    :hover {
      small { color: @white * 0.75 + @accent_colour * 0.25; }
    }
  }
}
#search {
  div.content {
    padding: 20px;
    form {
      margin: 0; float: none;
      span.submit_and_options {
        display: block;
      }
    }
    p { margin: 0 0 15px; }
    h4 { font-weight: normal; margin: 0 0 5px; }
  }
}

/* Recommendations */
#recommendations {
  div.browse {
    margin: 0; padding: 0; background: none;
    ul { min-height: 0; .border_radius(0); }
  }
}

/* Blank States */
div.blank {
  padding: 20px; background: @bg * 0.05 + @blue * 0.05 + @white * 0.9; position: relative;
border: 1px solid (@bg * 0.1 + @blue * 0.1 + @white * 0.8); z-index: 1; + h4 { font-size: 18px; margin: 0 0 10px; } + h4:last-child { margin: 0; } + p { font-size: 16px; margin: 0 0 10px; } + p:last-child { margin: 0; } + p.with_list_number.large { + span { margin-left: 48px; display: block; color: @white; } + } + p.earn span { font-size: 22px; color: @white; line-height: 48px; font-weight: bold; } + a { white-space: nowrap; } + a.hide { + position: absolute; top: -5px; right: -5px; display: block; height: 16px; width: 16px; padding: 3px; background: #E7E9F6; .border_radius(99); + } +} + +div.blank.small { + padding: 10px 20px; + h4 { font-weight: normal; font-size: 16px; } + p { margin: 0; } +} +div.blank.tiny { + padding: 10px 20px; + h4 { font-weight: normal; font-size: 14px; } + p { margin: 0; font-size: 12px; } +} +div.blank.rounded { + .border_radius(3); margin: 0 0 20px; +} +div.blank.rounded.bottom { .border_radius_top(0); } +div.blank.with_border_bottom { border-bottom: 1px solid (@bg * 0.1 + @blue * 0.1 + @white * 0.8); } +div.blank.no_border_top { border-top: none; } +div.blank.no_border_bottom { border-bottom: none; } +div.blank.no_side_borders { border-right: none; border-left: none; } +div.panel { + div.blank { + padding: 10px 20px; overflow: hidden; margin: 0; + h4 { font-weight: normal; font-size: 14px; } + p, ul { margin: 0 0 10px; font-size: 12px; } + p:last-child, ul:last-child { margin: 0; } + } +} + +/* Sidebar Browse */ +#sidebar { + div.panel { + div.content.browse { + padding: 0; margin: 0; + > ul { + min-height: 0; .border_radius(0); + > li { + div.thumbnail { + a.thumbnail { padding: 5px; } + img.marker.media_type { top: 48px; left: 8px; } + } + div.footer { + a.title, a.name { font-size: 11px; font-weight: normal; } + } + } + } + } + + div.content.browse.ads > ul > li { + width: 93px; + > div.thumbnail a.thumbnail { width: 83px; height: 62px; } + } + div.content.browse.brands { + .border_radius(3); + > ul { + background: none; + > li { + 
width: 52px; + > div.thumbnail { + padding: 3px; + a.thumbnail { width: 42px; height: 42px; padding: 2px; } + } + li.active { background: @accent_colour; } + } + } + } + div.footer { + div.info { float: none; } + div.pagination { float: none; margin: 3px 0 0; } + } + } +} + +/* List Numbers */ +label.list_number { + float: left; background: url('/images/transparent_backgrounds/black_15.png'); padding: 2px; width: 24px; height: 24px; display: block; + .border_radius(99); + b { + display: block; font-weight: bold; font-size: 14px; color: @white; background: @accent_colour; height: 20px; width: 20px; line-height: 20px; + text-align: center; .border_radius(99); .text_shadow(1, 1, 0px, (@accent_colour * 0.75 + @black * 0.25)); + border: 2px solid @white; + } +} +label.list_number.large { + padding: 4px; width: 48px; height: 48px; .border_radius(99); position: relative; left: -10px; + b { + font-size: 28px; height: 40px; width: 40px; .border_radius(99); line-height: 40px; + .text_shadow(2, 2, 0px, (@accent_colour * 0.75 + @black * 0.25)); border-width: 4px; + } +} + +/* Dashboard */ +#dashboard_controller { + #ads { + span.filter.state { float: right; } + } + #sidebar { + #shopping_style div.content { + p.less { display: block; } + p.more { display: none; } + } + #influences { + div.header { + padding-bottom: 0; + ul.tabs { + position: relative; top: 1px; z-index: 3; + li { + margin: 0 5px 0 0; + a { + border: none; background: url('/images/transparent_backgrounds/white_75.png'); + :hover { color: @black; } + } + } + li.active { + a { + background: @white; border: none; + :hover { color: @black; } + } + } + } + } + + div.tab_content { + overflow: hidden; padding: 0; + > ul { + padding: 10px 10px 0; max-height: 280px; min-height: 120px; overflow-y: scroll; .border_radius_bottom(3px); + } + } + div.footer { + form { + p { + margin: 0 0 5px; + img.marker { float: right; margin: 5px 0 0 0; } + span.invitee { + line-height: 26px; padding: 3px 3px 0; font-size: 14px; + small { 
color: @grey; font-size: 12px; } + } + } + p.indent { margin-left: 36px; } + p.submit { margin-top: 10px; } + } + } + } + } + + div.panel.full { + > div.content { + margin: 0; padding: 0; background: none; + ul { + li { + width: 148px; + div.thumbnail { + img.marker.media_type { top: 90px; } + a.thumbnail { width: 138px; height: 104px; } + } + } + } + } + } + #people { + form { + padding: 0 0 5px; + input { width: 225px; float: left; margin: 0 5px 0 0; } + a.button { height: 23px; line-height: 23px; width: 60px; padding: 0; text-align: center; } + } + } +} + +/* Remove Pages Titles when Browsing */ +#ads_controller, #brands_controller { + #page_title { display: none; } +} + +/* Brands > Show */ +#brands_controller.show { + #ads { + div.filters { + h3 { font-size: 16px; margin: 0; } + span.show { float: right; } + span.filter.dropdown.localisation { float: right; margin: 0 0 0 10px; } + span.filter.state { float: right; margin: 0 0 0 10px; } + } + } +} + +/* FAQ */ +#pages_controller.faq { + #answers { + h3 { margin-top: 20px; padding-top: 20px; border-top: 1px solid (@light_grey * 0.75 + @white * 0.25); } + h3.first { margin-top: 0; padding-top: 0; border: none; } + } + #questions { + div.content { + padding: 20px; + ul { + margin: 0; padding: 0; + li { + margin: 0 0 10px; list-style: none; display: block; padding: 0; + a { font-size: 14px; } + } + li:last-child { + margin: 0; + } + } + } + } +} + +/* Person Overview */ +#person_overview { + padding: 20px 10px; position: relative; z-index: 25; + #person { + float: left; width: 620px; + a.avatar { + display: block; float: left; width: 60px; height: 60px; + img { height: 100%; width: 100%; } + } + > div { + margin: 0 0 0 75px; color: @white; font-size: 14px; .text_shadow(1, 1, 0, @bg * 0.66 + @black * 0.33); + } + div.name { + h2 { + margin: 0 0 5px; display: inline; + a { + font-size: 20px; font-weight: bold; .text_shadow(1, 1, 0, @bg * 0.66 + @black * 0.33); + line-height: 1; color: @white; text-decoration: none; + 
:hover { text-decoration: underline; } + } + a.button.small { + font-size: 10px; + :hover { text-decoration: none; } + } + } + + span.points { + float: right; display: block; padding: 5px 10px; .border_radius(2); text-align: center; background: @white; position: relative; + min-width: 45px; + strong { color: @black; font-weight: bold; font-size: 24px; line-height: 1; display: block; .text_shadow(0, 0, 0, transparent); } + label { font-size: 9px; text-transform: uppercase; color: @grey; display: block; .text_shadow(0, 0, 0, transparent); font-weight: bold; } + } + span.points.with_redeem { + .border_radius_bottom(0); + a.button { + display: block; text-align: center; .border_radius_top(0); font-size: 10px; font-weight: bold; padding: 0; + position: absolute; height: 18px; left: 0; right: 0; bottom: -19px; line-height: 18px; text-transform: uppercase; border: none; + } + } + div.options { margin: 0; } + } + div.meta { + color: @white * 0.66 + @bg * 0.33; + span { color: @white; } + label { color: @white * 0.66 + @bg * 0.33; } + ul.networks { + display: inline; margin: 0; padding: 0; + li { + display: inline; line-height: 1; + img { position: relative; vertical-align: middle; top: -1px; } + } + } + } + + div.extra { + font-size: 12px; margin-top: 20px; margin-bottom: 20px; + span.toggle { + .text_shadow(1, 1, 0, @bg * 0.66 + @black * 0.33); + a { font-size: 10px; font-weight: bold; text-transform: uppercase; text-decoration: none; color: @accent_colour; } + b.arrow { display: inline-block; width: 0; height: 0; border: 5px solid transparent; position: relative; top: -2px; } + } + #less_info { + span.toggle { + b.arrow { border-top: 5px solid @accent_colour; border-bottom: 0; } + } + } + #more_info { + span.toggle { + float: right; + b.arrow { border-bottom: 5px solid @accent_colour; border-top: 0; } + } + h4 { + color: @white; margin: 0 0 10px 0; border-bottom: 1px solid (@white * 0.25 + @bg * 0.75); .text_shadow(1, 1, 0, @bg * 0.66 + @black * 0.33); + span { 
font-size: 12px; } + } + p { + margin: 0 0 5px; + label { display: block; float: left; width: 120px; color: @white * 0.66 + @bg * 0.33; } + span { display: block; margin: 0 0 0 130px; } + } + p:last-child { margin: 0; } + + } + } + div.login { + margin: 0 0 0 75px; + a.button { font-weight: bold; } + } + } +} + +/* Dashboard Nav */ +#dashboard_nav { + position: absolute; bottom: 0; left: 10px; margin: 0; padding: 0; overflow: hidden; + li { + display: block; float: left; margin: 0 5px 0 0; + a { + display: block; height: 28px; padding: 0 10px; line-height: 28px; .border_radius_top(2); + text-decoration: none; color: @white; background: url('/images/transparent_backgrounds/accent_colour_30.png'); font-size: 14px; + font-weight: bold; + :hover { background: url('/images/transparent_backgrounds/accent_colour_45.png'); } + } + } + li.active { + a { + background: @white; color: @black; + :hover { color: @black; } + } + } +} + +/* Dwellometer */ +#dwellometer { + z-index: 45; float: right; .box_shadow(0, 0, 0, transparent); margin: 0; + div.content { + text-align: center; position: relative; + object, object embed { position: relative; z-index: 46; line-height: 0; } + div.title { + position: absolute; bottom: 10px; left: 0; right: 0; z-index: 50; + img { width: 120px; display: block; margin: 0 auto; position: relative; left: -5px; } + } + } +} + +/* Activity Stream */ +#activity { + div.content { + ul.events { + padding: 0; margin: 0 0 -10px; + li { + margin: 0; padding: 10px 0; border-bottom: 1px solid (@light_grey * 0.33 + @white * 0.66); + list-style: none; overflow: hidden; + small.meta { + font-size: 12px; color: @light_grey; float: right; + } + a.button { float: right; margin: 0 0 10px 10px; } + a.avatar, a.logo, a.thumbnail { + height: 32px; display: block; float: left; + img { width: 100%; height: 100%; } + } + a.avatar, a.logo, a.icon { width: 32px; } + a.thumbnail { width: 42px; } + div.symbols { + float: left; overflow: hidden; + b { + display: block; float: 
left; margin: 10px 5px 0; + img { height: 12px; width: 12px; } + } + b.voted { margin: 10px 3px 0; padding: 2px; .border_radius(2); } + b.voted.for { background: @colour_positive * 0.33 + @white * 0.66; } + b.voted.against { background: @colour_negative * 0.33 + @white * 0.66; } + } + /* Temporarily removed avatar and symbol */ +/* div.symbols a.agent, b { display: none; }*/ + div.description { + font-size: 12px; color: @grey; + a.agent { font-weight: bold; } + } + div.comment { + margin-top: 2px; + b.tail { + display: block; margin: 0 0 0 10px; width: 0; height: 0; border: 5px solid transparent; + border-top: none; border-bottom: 5px solid (@light_grey * 0.25 + @white * 0.75); + } + blockquote { + margin: 0; font-style: normal; color: @dark_grey; + .border_radius(3); background: @light_grey * 0.25 + @white * 0.75; padding: 5px 10px; + span.view_comment { + color: @grey; + } + } + } + div.content { + overflow: hidden; + } + } + li.new_comment.ad, li.endorsed.ad, li.voted { + div.description, div.content { margin-left: 106px; } +/* div.description, div.content { margin-left: 53px; }*/ + } + li.new_comment.brand, li.replied_to, li.endorsed.brand, li.connected, li.sn_setup { + div.description, div.content { margin-left: 96px; } +/* div.description, div.content { margin-left: 43px; }*/ + } + li.replied_to { + div.content { + a.thumbnail, a.logo { margin-top: 7px; } + } + } + li.replied_to.ad { + div.content { + div.comment { margin-left: 52px; } + } + } + li.replied_to.brand { + div.content { + div.comment { margin-left: 42px; } + } + } + li.voted div.description span.action { .border_radius(2); color: @dark_grey; padding: 0 3px; white-space: nowrap; } + li.voted.for div.description span.action { background: @colour_positive * 0.15 + @white * 0.85; } + li.voted.against div.description span.action { background: @colour_negative * 0.15 + @white * 0.85; } + li:first-child { padding-top: 0; } + li:last-child { border-bottom: none; } + li:hover div.content div.comment 
blockquote span.view_comment { + } + } + } +} + +/* Login/Register Modal */ +#login_register { + div.location_select, + div.location_search { margin-left: 130px; } + h3 { + small { font-size: 14px; font-weight: normal; display: block; color: @grey; text-align: left; margin: 0; display: block; } + } +} + +/* Contact Form in Pages */ +#pages_controller { + #sidebar { + #contact { + margin: 15px 0 0; + form { + label { text-align: left; float: none; width: auto; font-size: 12px; font-weight: bold; line-height: 1; margin: 0 0 5px; } + p.submit.indent { + margin: 0; + span.with_cancel { display: none; } + } + } + } + } +} + +/* Exclusive Offers */ +#offers { + div.content { + a.gift { + display: block; text-align: center; + img { height: 100px; } + } + } +} + +div.browse { + margin: 0 0 20px; + &.class { + padding: 0; + } + div.header { + padding: 10px 10px 9px; text-align: left; background: @bg url('/images/panel_header_bg.png') repeat-x top left; + border-bottom: 1px solid (@bg * 0.66 + @black * 0.33); line-height: 1; height: 18px; + .border_radius_top(3); color: @light_grey; + h3 { font-size: 16px; margin: 0; color: @white; .text_shadow(1, 1, 0, @bg * 0.66 + @black * 0.33); } + span.filter { + float: left; display: block; overflow: hidden; position: relative; z-index: 5; + a { + margin: 0 1px 0 0; display: block; float: left; padding: 0 8px; height: 18px; font-weight: bold; font-size: 10px; line-height: 18px; + text-transform: uppercase; background: url('/images/transparent_backgrounds/black_50.png'); color: @light_grey; text-decoration: none; position: relative; z-index: 3; + .active { + background: @white; color: @black; z-index: 4; + :hover { color: @black; } + } + :hover { color: @white; } + :first-child { .border_radius_left(2); } + :last-child { .border_radius_right(2); margin-right: 0; } + } + } + + span.filter.dropdown { + margin: 0; position: relative; overflow: visible; + a { + .border_radius(2); background: @white; color: @black; margin: 0; position: 
relative; padding-right: 25px; + img { float: left; margin: 4px 5px 0 0; } + b.arrow { + float: right; display: block; height: 0; width: 0; border: 5px solid transparent; border-top: 5px solid @black; border-bottom: none; + position: absolute; top: 6px; right: 10px; + } + :hover { + background: @accent_colour; color: @white; + b.arrow { border-top: 5px solid @white; } + } + } + ul { + position: absolute; top: 100%; left: 0; margin: 1px 0 0; padding: 0; background: @white; .border_radius(2); + .box_shadow(0, 1, 1, @black); + li { + list-style: none; display: block; padding: 0; margin: 0; + a { + display: block; height: 18px; line-height: 18px; color: @black; font-size: 10px; text-transform: uppercase; background: transparent; + border-bottom: 1px solid (@light_grey * 0.66 + @white * 0.33); float: none; margin: 0; .border_radius(0); white-space: nowrap; + :hover { background: url('/images/transparent_backgrounds/accent_colour_25.png'); color: @black; } + } + :last-child { + a { border: none; } + } + } + } + } + span.filter.dropdown.sort { float: left; margin: 0 0 0 10px; } + span.filter.dropdown.localisation { float: left; margin: 0 0 0 10px; } + a.more { + float: right; color: @white; .text_shadow(1, 1, 0, @bg * 0.66 + @black * 0.33); font-size: 14px; font-weight: bold; + position: relative; top: 2px; + :hover { text-decoration: none; } + } + } + > ul { + margin: 0; background: @white; padding: 10px 0 0 10px; .border_radius(3); position: relative; + li { + display: block; float: left; list-style: none; margin: 0 10px 10px 0; padding: 5px; position: relative; + background: @white; width: 130px; border: 1px solid (@light_grey * 0.33 + @white * 0.66); .border_radius(2); + a.remove { + position: absolute; height: 16px; width: 16px; padding: 3px; background: @accent_colour; + .border_radius(99); display: none; z-index: 3; top: -8px; right: -8px; + img { vertical-align: middle; } + } + div.thumbnail { + .border_radius_top(3); position: relative; z-index: 3; + .marker { + 
position: absolute; padding: 2px; .border_radius(2); z-index: 3; + background: url('/images/transparent_backgrounds/white_75.png'); height: 12px; width: 12px; + } + .marker.coupon { + height: auto; width: auto; top: 10px; right: -3px; padding: 0; background: transparent; overflow: hidden; position: absolute; + b { + display: block; height: 0; width: 0; float: left; border: 14px solid transparent; border-top: 14px solid @accent_colour; + border-bottom: none; border-right: none; float: left; + } + span { + color: @white; font-size: 10px; font-weight: bold; text-transform: uppercase; height: 14px; line-height: 14px; display: block; + padding: 0 4px 0 2px; background: @accent_colour; .text_shadow(1, 1, 0px, (@accent_colour * 0.75 + @black * 0.25)); margin: 0 0 0 14px; + } + } + .marker.video { + position: absolute; left: 50%; top: 50%; background: @white; width: 10px; height: 10px; + b { display: block; width: 0; height: 0; border: 5px solid transparent; border-left: 10px solid @black; border-right: none; } + } + .marker.endorsed_by_me { background: none; padding: 0; right: 0; bottom: -32px; .border_radius(2); background: @white; } + a.thumbnail { + display: block; overflow: hidden; position: relative; text-align: center; + img { position: relative; display: block; margin: auto; } + } + } + div.text { + margin: 3px 0 0; display: block; + a { text-decoration: none; } + a.title { + display: block; text-decoration: none; font-weight: bold; font-size: 12px; line-height: 16px; + white-space: nowrap; height: 16px; overflow: hidden; + :before { + display: block; height: 32px; width: 20px; content: " "; float: right; right: -15px; top: -8px; + background: @white; position: relative; z-index: 1; .box_shadow(-5, 0, 10, @white); + } + } + small { + font-size: 11px; line-height: 13px; color: @grey; display: block; height: 13px; overflow: hidden; white-space: nowrap; + a { font-weight: bold; } + :before { + display: block; height: 32px; width: 20px; content: " "; float: right; 
right: -15px; top: -8px; + background: @white; position: relative; z-index: 1; .box_shadow(-5, 0, 10, @white); + } + } + } + :hover { + background: @accent_colour; + a.remove { display: block; } + div.thumbnail { + a.marker.remove, a.marker.video { + b { display: inline-block; } + } + a.marker.video { .box_shadow(0, 0, 2, @black); } + } + div.text { + a { color: @white; } + a.title:before { background: @accent_colour; .box_shadow(-5, 0, 10, @accent_colour); } + small { + color: @white * 0.75 + @accent_colour * 0.25; + :before { background: @accent_colour; .box_shadow(-5, 0, 10, @accent_colour); } + } + } + div.footer a { color: @white; } + } + } + > li.ad div.thumbnail a.thumbnail { + width: 130px; height: 97px; + img { width: 100%; height: 100%; } + } + > li.brand div.thumbnail a.thumbnail { + width: 120px; height: 87px; padding: 5px; background: @white; .border_radius(2); + img { max-width: 120px; max-height: 87px; } + } + li.paginate { + margin-bottom: 0; + a { + display: block; position: relative; text-decoration: none; height: 131px; + div.arrow { + background: #81c153 url('/images/button_bg.png') repeat-x left top; border: 1px solid (@accent_colour * 0.75 + @black * 0.25); + height: 44px; .border_radius(99); width: 44px; margin: 0 auto; position: relative; top: 32px; + b { text-indent: -9000px; display: block; border: 10px solid transparent; width: 0; height: 0; position: relative; top: 12px; } + } + div.label { + position: absolute; bottom: 5px; left: 0; right: 0; line-height: 13px; + color: @accent_colour * 0.85 + @black * 0.15; text-decoration: none; + font-weight: bold; font-size: 12px; text-align: center; + } + :hover { + div.arrow { background: #abd56e url('/images/button_bg.png') repeat-x left -44px; } + } + } + :hover { background: transparent; } + } + li.paginate.previous a div b { border-right: 15px solid @white; border-left: none; left: 12px; } + li.paginate.next a div b { border-left: 15px solid @white; border-right: none; left: 16px; } + } + > 
div.footer { + padding: 9px 10px 10px; background: @light_grey * 0.75 + @white * 0.25; overflow: hidden; + border-top: 1px solid @light_grey; .border_radius_bottom(3); + div.info { + float: left; color: @grey; + strong { color: @black; font-weight: normal; } + } + div.pagination { + float: right; + > * { + display: inline-block; line-height: 1; padding: 0 6px; line-height: 18px; height: 18px; background: @white; + .border_radius(3); text-decoration: none; font-weight: bold; + font-size: 10px; text-transform: uppercase; + } + a { color: @grey; } + a:hover { color: @black; } + span.disabled { color: @light_grey; } + span.current { color: @white; background: @bg; border: none; } + span.current:hover { color: @white; } + } + } +} +div.browse.with_categories { margin: 0 0 0 160px; } +div.browse.with_options > ul { .border_radius_top(0); } +div.browse.with_footer > ul { .border_radius_bottom(0); } +/* Browse List */ +div.browse.list { +> ul { + margin: 0; min-height: 320px; + padding: 10px 0 0 10px; overflow: hidden; + > li { + display: block; list-style: none; margin: 0 10px 10px 0; padding: 5px; + .border_radius(3); position: relative; line-height: normal; + .marker { + position: absolute; padding: 2px; .border_radius(2); + background: url('/images/transparent_backgrounds/white_75.png'); + img { height: 12px; width: 12px; } + } + img.marker { height: 12px; width: 12px; } + span.marker.new { + color: black; left: -5px; top: -5px; background: none; background-color: @white * 0.1 + @yellow * 0.6 + @red * 0.3; line-height: 1; padding: 2px 5px; + font-weight: bold; + } + a.marker.media_type { + display: inline-block; text-decoration: none; top: 39px; left: 8px; + font-size: 10px; + b { font-weight: normal; margin: 0 0 0 2px; line-height: 1; display: none; } + img { vertical-align: middle; } + } + a.thumbnail { + float: left; + width: 68px; display: block; overflow: hidden; + border: 1px solid @light_grey; + :hover { border-color: @accent_colour; } + } + span.title_brand { + 
display: block; margin: 0 0 2px 75px; + a { margin: 0; display: inline; } + a.brand_name { font-weight: normal; font-size: 12px; } + } + a.ad_title { + font-weight: bold; font-size: 14px; margin: 0 0 0 75px; display: block; + } + a.brand_name { + font-weight: bold; font-size: 14px; margin: 0 0 0 75px; display: block; + } + small { + display: block; color: @grey; margin: 0 0 0 75px; font-size: 12px; + } + small.brand_name { display: inline; margin: 0; } + ul.chart { + margin: 0 0 0 80px; + height: 39px; + } + ul.networks { + margin: 3px 0 0 75px; padding: 0; overflow: hidden; + li { display: block; float: left; margin: 0 5px 0 0; line-height: 1; } + } + div.points { + display: none; + font-size: 12px; text-align: right; + label { color: @grey; } + } + a.remove { bottom: -3px; right: -3px; } + } + li.ad { + a.thumbnail { height: 51px; } + span.title_brand { + small.brand_name { + display: block; + } + } + } + li.brand { + a.thumbnail { height: 68px; } + } + } +} +div.browse.list.with_options ul { .border_radius_top(0); } +div.browse.list.with_footer ul { .border_radius_bottom(0); } +div.browse.list.cols_2 { + > ul { + > li { + width: 285px; float: left; + :hover { + background: @white; + } + } + } +} +div.browse.ads.list { + > ul { + > li { + height: 53px; + a.thumbnail { + height: 51px; + } + } + } +} +div.browse.brands.list { + > ul { + > li { + height: 68px; + a.thumbnail { + height: 66px; + } + } + } +} + +/* Categories List */ +#categories { + margin: 40px 0 0; width: 160px; float: left; position: relative; z-index: 1; + ul { + margin: 0; padding: 10px 0 0; + li { + list-style: none; margin: 0; padding: 0; font-size: 14px; + a { color: @grey; display: block; padding: 5px 10px 5px 15px; text-decoration: none; .border_radius_left(3); } + a:hover { color: @black; background: @light_grey * 0.15 + @white * 0.85; } + } + .all a { font-weight: bold; } + .current a { + background: @white; color: @black; border: 1px solid (@light_grey * 0.25 + @white * 0.75); 
border-right: none; border-left: 5px solid @bg; + padding-left: 10px; + } + } +} + +/* Ads > Show */ +#ad { + div.header { + overflow: hidden; + h3 { font-size: 16px; margin: 0 0 3px; } + small { + a.category { font-weight: bold; color: @accent_colour; } + span.networks img { position: relative; top: 3px; } + } + span.brand { + float: right; color: @white; + a.brand_name { font-weight: bold; color: @accent_colour; } + } + } + div.content { + padding: 0; position: relative; + a.toggle_size { + display: block; .border_radius(3); background-color: @black; padding: 0 5px 0 26px; + background-position: 5px center; background-repeat: no-repeat; text-decoration: none; margin: 5px 5px 0 0; + position: absolute; top: 0; right: 0; line-height: 25px; z-index: 45; + } + img.creative { margin: 0 auto; max-width: 540px; display: block; } + object { position: relative; z-index: 44; } + object.video { line-height: 0; font-size: 0; } + object embed { position: relative; z-index: 45; line-height: 0; font-size: 0; } + } + div.content.not_video { + padding: 40px; text-align: center; + * { margin-left: auto; margin-right: auto; } + object.flash { margin-bottom: 0; } + } + div.footer { + padding: 0; + div.vote_views { + padding: 5px 10px; overflow: hidden; + div.share { float: right; margin: 2px 0 0 0; } + #login_register_msg, #encourage_vote_msg { line-height: 22px; font-weight: bold; color: @black; } + } + } +} +#sidebar { + #meta { + table { + margin: 0; + tr:last-child td { padding-bottom: 0; } + td { + padding: 0 0 5px; + ul.networks { + margin: 0; padding: 0; + li { + list-style: none; display: inline; + } + li { + } + } + } + td.label { color: @grey; white-space: nowrap; width: 1%; text-align: right; padding-right: 5px; } + } + } +} + +/* Voting */ +div.voted { + font-size: 12px; line-height: 22px; color: @black; display: inline-block; font-weight: bold; + img { float: left; margin-right: 5px; padding: 3px; .border_radius(3); } +} +#voted_up { + img { background: @colour_positive 
* 0.66 + @bg * 0.15; } +} +#voted_down { + img { background: @colour_negative * 0.66 + @bg * 0.15; } +} +#encourage_comment { + display: inline-block; line-height: 22px; font-weight: bold; +} +#vote { + overflow: hidden; font-size: 12px; line-height: 22px; color: @black; float: left; + a { + color: @white; font-weight: bold; overflow: hidden; display: block; + width: 16px; text-decoration: none; text-align: center; font-size: 10px; padding: 3px; text-transform: uppercase; + } + a.up { + float: left; background: @colour_positive * 0.66 + @bg * 0.15; .border_radius_left(3); + :hover { background: @colour_positive * 0.85 + @bg * 0.15; } + } + a.down { + float: left; background: @colour_negative * 0.66 + @bg * 0.15; .border_radius_right(3); + margin: 0 5px 0 1px; + :hover { background: @colour_negative * 0.85 + @bg * 0.15; } + } +} +#vote.disabled { + a.up { + background: (@colour_positive * 0.66 + @bg * 0.15) * 0.15 + @grey * 0.85; + :hover { background: (@colour_positive * 0.85 + @bg * 0.15) * 0.25 + @grey * 0.75; } + } + a.down { + background: (@colour_negative * 0.66 + @bg * 0.15) * 0.15 + @grey * 0.85; + :hover { background: (@colour_negative * 0.85 + @bg * 0.15) * 0.25 + @grey * 0.75; } + } +} +#sidebar { + #ads > ul li, #recommendations > ul li { + width: 81px; + div.thumbnail { + a.thumbnail { height: 60px; width: 81px; } + } + div.text { + a.title { font-size: 11px; height: 14px; line-height: 14px; } + small { display: none; } + } + } + #brands > ul li { + width: 55px; + div.thumbnail { + a.thumbnail { + height: 45px; width: 45px; + img { max-height: 45px; max-width: 45px; } + } + } + div.text { display: none; } + } +} + +/* My Account */ +#accounts_controller { + #top { + #page_title { + #page_options { + a.button.public_profile { + float: right; font-size: 16px; line-height: 1; height: auto; padding: 8px 35px 8px 15px; position: relative; + b.arrow { display: block; height: 0; width: 0; position: absolute; top: 10px; right: 15px; border: 6px solid 
transparent; border-right: none; border-left: 6px solid @white; margin: 0; } + } + a.button.goto_dashboard { + float: right; font-size: 16px; line-height: 1; height: auto; padding: 8px 15px 8px 35px; margin-right: 5px; position: relative; + b.arrow { display: block; height: 0; width: 0; position: absolute; top: 10px; left: 15px; border: 6px solid transparent; border-left: none; border-right: 6px solid @white; margin: 0; } + } + } + } + } + #account_nav { + float: left; width: 200px; margin: 0 20px 0 0; + ul.nav { + margin: 0; padding: 0; + li { + margin: 0 0 5px; display: block; list-style: none; padding: 0; + a { + display: block; height: 30px; text-decoration: none; color: @white; + b { + border: 15px solid transparent; border-right: none; border-left: 10px solid transparent; width: 0; + height: 0; float: right; display: none; + } + span { + .border_radius(3); background: @bg; display: block; + line-height: 30px; padding: 0 10px; font-size: 14px; font-weight: bold; margin: 0 10px 0 0; + } + } + :hover { + a { + color: @white; + b { border-left-color: @bg; display: block; } + span { background: @bg; .border_radius_right(0); } + } + } + } + li.current a { + b { border-left-color: @accent_colour; display: block; } + span { background: @accent_colour; color: @white; .border_radius_right(0); } + } + } + } + #main { + > div { + margin: 0 0 20px; + form { margin: 0; } + } + #profile { + a.avatar { + float: left; display: block; + width: 70px; overflow: hidden; position: relative; text-decoration: none; + img { width: 100%; } + span { + display: block; line-height: 1; padding: 3px; margin: 5px 0 0; color: @white; background: @accent_colour; + .border_radius(3); .text_shadow(1, 1, 0, @grey); + text-align: center; font-size: 10px; font-weight: bold; text-transform: uppercase; + } + } + form { + margin: 0 0 0 90px; + h4 { margin: 10px 0 20px; border-bottom: 1px solid (@light_grey * 0.5 + @white * 0.5); padding: 0; color: @bg; font-size: 16px; } + ul.choices { + li { width: 
30%; } + } + div.extra { margin-top: 20px; } + } + } + + #networks { + ul { margin: 0 -10px -10px 0; padding: 0; overflow: hidden; + li:hover + { + background: @light_grey; display: block; float: left; width: 180px; + padding: 10px; margin: 0 10px 10px 0; list-style: none; .border_radius(3); + position: relative; + * { line-height: normal; } + img { vertical-align: middle; float: left; } + .name { font-weight: bold; font-size: 14px; display: block; margin: -2px 0 0 42px; } + small { + font-size: 12px; color: @grey; display: block; margin-left: 42px; + strong { color: @black; font-weight: normal; } + } + :hover { + } + } + li.installed { + background: @white; + border: 2px solid @accent_colour; padding: 8px; + } + li.unavailable { + .name { color: @black; } + :hover { + background: @light_grey; + } + } + li:hover { + background: @light_grey * 0.5 + @white * 0.5; + } + } + } + } +} + +/* Shopping Style Panel */ +#shopping_style { + div.header a.button.small { float: right; } + div.content { + p { + margin: 0 0 10px; + label { text-transform: uppercase; font-size: 11px; display: block; color: @bg; font-weight: bold; } + span { color: @black; } + span.toggle { white-space: nowrap; color: @grey; } + :last-child { margin: 0; } + } + p.more { text-align: left; font-weight: normal; } + p.less { display: none; margin: 0; } + } +} + +/* People Controller */ +#people_controller.index { + #main { + div.panel { + float: left; width: 300px; margin: 0 20px 0 0; + :last-child { margin-right: 0; } + } + } +} +#people_controller.show { + #person_overview, #shopping_style { + a.button.small { + } + } + #content { + #shopping_style { + float: left; width: 240px; margin: 0 20px 0 0; + } + #main { width: 360px; } + } +} + +/* Search Results */ +#search_results { + margin: 0 0 20px; + li { + :hover { + small { color: @white * 0.75 + @accent_colour * 0.25; } + } + } +} +#search { + div.content { + padding: 20px; + form { + margin: 0; float: none; + span.submit_and_options { + display: 
block; + } + } + p { margin: 0 0 15px; } + h4 { font-weight: normal; margin: 0 0 5px; } + } +} + +/* Recommendations */ +#recommendations { + div.browse { + margin: 0; padding: 0; background: none; + ul { min-height: 0; .border_radius(0); } + } +} + +/* Blank States */ +div.blank { + padding: 20px; background: @bg * 0.05 + @blue * 0.05 + @white * 0.9; position: relative; + border: 1px solid (@bg * 0.1 + @blue * 0.1 + @white * 0.8); z-index: 1; + h4 { font-size: 18px; margin: 0 0 10px; } + h4:last-child { margin: 0; } + p { font-size: 16px; margin: 0 0 10px; } + p:last-child { margin: 0; } + p.with_list_number.large { + span { margin-left: 48px; display: block; color: @white; } + } + p.earn span { font-size: 22px; color: @white; line-height: 48px; font-weight: bold; } + a { white-space: nowrap; } + a.hide { + position: absolute; top: -5px; right: -5px; display: block; height: 16px; width: 16px; padding: 3px; background: #E7E9F6; .border_radius(99); + } +} + +div.blank.small { + padding: 10px 20px; + h4 { font-weight: normal; font-size: 16px; } + p { margin: 0; } +} +div.blank.tiny { + padding: 10px 20px; + h4 { font-weight: normal; font-size: 14px; } + p { margin: 0; font-size: 12px; } +} +div.blank.rounded { + .border_radius(3); margin: 0 0 20px; +} +div.blank.rounded.bottom { .border_radius_top(0); } +div.blank.with_border_bottom { border-bottom: 1px solid (@bg * 0.1 + @blue * 0.1 + @white * 0.8); } +div.blank.no_border_top { border-top: none; } +div.blank.no_border_bottom { border-bottom: none; } +div.blank.no_side_borders { border-right: none; border-left: none; } +div.panel { + div.blank { + padding: 10px 20px; overflow: hidden; margin: 0; + h4 { font-weight: normal; font-size: 14px; } + p, ul { margin: 0 0 10px; font-size: 12px; } + p:last-child, ul:last-child { margin: 0; } + } +} + +#yelow { + #short { + color: #fea; + } + #long { + color: #ffeeaa; + } + #rgba { + color: rgba(255, 238, 170, 0.1); + } +} + +#blue { + #short { + color: #00f; + } + #long { + 
color: #0000ff; + } + #rgba { + color: rgba(0, 0, 255, 0.1); + } +} + +#overflow { + .a { color: #111111 - #444444; } // #000000 + .b { color: #eee + #fff; } // #ffffff + .c { color: #aaa * 3; } // #ffffff + .d { color: #00ee00 + #009900; } // #00ff00 +} + +#grey { + color: rgb(200, 200, 200); +} + +#808080 { + color: hsl(50, 0%, 50%); +} + +#00ff00 { + color: hsl(120, 100%, 50%); +} +/******************\ +* * +* Comment Header * +* * +\******************/ + +/* + + Comment + +*/ + +/* + * Comment Test + * + * - cloudhead (http://cloudhead.net) + * + */ + +//////////////// +@var: "content"; +//////////////// + +/* Colors + * ------ + * #EDF8FC (background blue) + * #166C89 (darkest blue) + * + * Text: + * #333 (standard text) // A comment within a comment! + * #1F9EC9 (standard link) + * + */ + +/* @group Variables +------------------- */ +#comments /* boo */ { + /**/ // An empty comment + color: red; /* A C-style comment */ + background-color: orange; // A little comment + font-size: 12px; + + /* lost comment */ content: @var; + + border: 1px solid black; + + // padding & margin // + padding: 0; + margin: 2em; +} // + +/* commented out + #more-comments { + color: grey; + } +*/ + +#last { color: blue } +// +.comma-delimited { + background: url(bg.jpg) no-repeat, url(bg.png) repeat-x top left, url(bg); + text-shadow: -1px -1px 1px red, 6px 5px 5px yellow; + -moz-box-shadow: 0pt 0pt 2px rgba(255, 255, 255, 0.4) inset, + 0pt 4px 6px rgba(255, 255, 255, 0.4) inset; +} +@font-face { + font-family: Headline; + src: local(Futura-Medium), + url(fonts.svg#MyGeometricModern) format("svg"); +} +.other { + -moz-transform: translate(0, 11em) rotate(-90deg); +} +p:not([class*="lead"]) { + color: black; +} + +input[type="text"].class#id[attr=32]:not(1) { + color: white; +} + +div#id.class[a=1][b=2].class:not(1) { + color: white; +} + +ul.comma > li:not(:only-child)::after { + color: white; +} + +ol.comma > li:nth-last-child(2)::after { + color: white; +} + +li:nth-child(4n+1), 
+li:nth-child(-5n), +li:nth-child(-n+2) { + color: white; +} + +a[href^="http://"] { + color: black; +} + +a[href$="http://"] { + color: black; +} + +form[data-disabled] { + color: black; +} + +p::before { + color: black; +} +@charset "utf-8"; +div { color: black; } +div { width: 99%; } + +* { + min-width: 45em; +} + +h1, h2 > a > p, h3 { + color: none; +} + +div.class { + color: blue; +} + +div#id { + color: green; +} + +.class#id { + color: purple; +} + +.one.two.three { + color: grey; +} + +@media print { + font-size: 3em; +} + +@media screen { + font-size: 10px; +} + +@font-face { + font-family: 'Garamond Pro'; + src: url("/fonts/garamond-pro.ttf"); +} + +a:hover, a:link { + color: #999; +} + +p, p:first-child { + text-transform: none; +} + +q:lang(no) { + quotes: none; +} + +p + h1 { + font-size: 2.2em; +} + +#shorthands { + border: 1px solid #000; + font: 12px/16px Arial; + margin: 1px 0; + padding: 0 auto; + background: url("http://www.lesscss.org/spec.html") no-repeat 0 4px; +} + +#more-shorthands { + margin: 0; + padding: 1px 0 2px 0; + font: normal small/20px 'Trebuchet MS', Verdana, sans-serif; +} + +.misc { + -moz-border-radius: 2px; + display: -moz-inline-stack; + width: .1em; + background-color: #009998; + background-image: url(images/image.jpg); + background: -webkit-gradient(linear, left top, left bottom, from(red), to(blue)); + margin: ; +} + +#important { + color: red !important; + width: 100%!important; + height: 20px ! 
important; +} + +#functions { + @var: 10; + color: color("evil red"); // #660000 + width: increment(15); + height: undefined("self"); + border-width: add(2, 3); + variable: increment(@var); +} + +#built-in { + @r: 32; + escaped: e("-Some::weird(#thing, y)"); + lighten: lighten(#ff0000, 50%); + darken: darken(#ff0000, 50%); + saturate: saturate(#29332f, 20%); + desaturate: desaturate(#203c31, 20%); + greyscale: greyscale(#203c31); + format: %("rgb(%d, %d, %d)", @r, 128, 64); + format-string: %("hello %s", "world"); + eformat: e(%("rgb(%d, %d, %d)", @r, 128, 64)); +} + +@var: @a; +@a: 100%; + +.lazy-eval { + width: @var; +} +.mixin (@a: 1px, @b: 50%) { + width: @a * 5; + height: @b - 1%; +} + +.mixina (@style, @width, @color: black) { + border: @width @style @color; +} + +.mixiny +(@a: 0, @b: 0) { + margin: @a; + padding: @b; +} + +.hidden() { + color: transparent; +} + +.two-args { + color: blue; + .mixin(2px, 100%); + .mixina(dotted, 2px); +} + +.one-arg { + .mixin(3px); +} + +.no-parens { + .mixin; +} + +.no-args { + .mixin(); +} + +.var-args { + @var: 9; + .mixin(@var, @var * 2); +} + +.multi-mix { + .mixin(2px, 30%); + .mixiny(4, 5); +} + +.maxa(@arg1: 10, @arg2: #f00) { + padding: @arg1 * 2px; + color: @arg2; +} + +body { + .maxa(15); +} + +@glob: 5; +.global-mixin(@a:2) { + width: @glob + @a; +} + +.scope-mix { + .global-mixin(3); +} + +.nested-ruleset (@width: 200px) { + width: @width; + .column { margin: @width; } +} +.content { + .nested-ruleset(600px); +} + +// + +.same-var-name2(@radius) { + radius: @radius; +} +.same-var-name(@radius) { + .same-var-name2(@radius); +} +#same-var-name { + .same-var-name(5px); +} + +// + +.var-inside () { + @var: 10px; + width: @var; +} +#var-inside { .var-inside; } +.mix-inner (@var) { + border-width: @var; +} + +.mix (@a: 10) { + .inner { + height: @a * 10; + + .innest { + width: @a; + .mix-inner(@a * 2); + } + } +} + +.class { + .mix(30); +} +.mixin () { + zero: 0; +} +.mixin (@a: 1px) { + one: 1; +} +.mixin (@a) { + 
one-req: 1; +} +.mixin (@a: 1px, @b: 2px) { + two: 2; +} + +.mixin (@a, @b, @c) { + three-req: 3; +} + +.mixin (@a: 1px, @b: 2px, @c: 3px) { + three: 3; +} + +.zero { + .mixin(); +} + +.one { + .mixin(1); +} + +.two { + .mixin(1, 2); +} + +.three { + .mixin(1, 2, 3); +} + +// + +.mixout ('left') { + left: 1; +} + +.mixout ('right') { + right: 1; +} + +.left { + .mixout('left'); +} +.right { + .mixout('right'); +} + +// + +.border (@side, @width) { + color: black; + .border-side(@side, @width); +} +.border-side (left, @w) { + border-left: @w; +} +.border-side (right, @w) { + border-right: @w; +} + +.border-right { + .border(right, 4px); +} +.border-left { + .border(left, 4px); +} + +// + + +.border-radius (@r) { + both: @r * 10; +} +.border-radius (@r, left) { + left: @r; +} +.border-radius (@r, right) { + right: @r; +} + +.only-right { + .border-radius(33, right); +} +.only-left { + .border-radius(33, left); +} +.left-right { + .border-radius(33); +} +.mixin { border: 1px solid black; } +.mixout { border-color: orange; } +.borders { border-style: dashed; } + +#namespace { + .borders { + border-style: dotted; + } + .biohazard { + content: "death"; + .man { + color: transparent; + } + } +} +#theme { + > .mixin { + background-color: grey; + } +} +#container { + color: black; + .mixin; + .mixout; + #theme > .mixin; +} + +#header { + .milk { + color: white; + .mixin; + #theme > .mixin; + } + #cookie { + .chips { + #namespace .borders; + .calories { + #container; + } + } + .borders; + } +} +.secure-zone { #namespace .biohazard .man; } +.direct { + #namespace > .borders; +} +#operations { + color: #110000 + #000011 + #001100; // #111111 + height: 10px / 2px + 6px - 1px * 2; // 9px + width: 2 * 4 - 5em; // 3em + .spacing { + height: 10px / 2px+6px-1px*2; + width: 2 * 4-5em; + } + substraction: 20 - 10 - 5 - 5; // 0 + division: 20 / 5 / 4; // 1 +} + +@x: 4; +@y: 12em; + +.with-variables { + height: @x + @y; // 16em + width: 12 + @y; // 24em + size: 5cm - @x; // 1cm +} + 
+@z: -2; + +.negative { + height: 2px + @z; // 0px + width: 2px - @z; // 4px +} + +.shorthands { + padding: -1px 2px 0 -4px; // +} + +.colors { + color: #123; // #112233 + border-color: #234 + #111111; // #334455 + background-color: #222222 - #fff; // #000000 + .other { + color: 2 * #111; // #222222 + border-color: #333333 / 3 + #111; // #222222 + } +} +.parens { + @var: 1px; + border: (@var * 2) solid black; + margin: (@var * 1) (@var + 2) (4 * 4) 3; + width: (6 * 6); + padding: 2px (6px * 6px); +} + +.more-parens { + @var: (2 * 2); + padding: (2 * @var) 4 4 (@var * 1px); + width: (@var * @var) * 6; + height: (7 * 7) + (8 * 8); + margin: 4 * (5 + 5) / 2 - (@var * 2); + //margin: (6 * 6)px; +} + +.nested-parens { + width: 2 * (4 * (2 + (1 + 6))) - 1; + height: ((2+3)*(2+3) / (9-4)) + 1; +} + +.mixed-units { + margin: 2px 4em 1 5pc; + padding: (2px + 4px) 1em 2px 2; +} +#first > .one { + > #second .two > #deux { + width: 50%; + #third { + &:focus { + color: black; + #fifth { + > #sixth { + .seventh #eighth { + + #ninth { + color: purple; + } + } + } + } + } + height: 100%; + } + #fourth, #five, #six { + color: #110000; + .seven, .eight > #nine { + border: 1px solid black; + } + #ten { + color: red; + } + } + } + font-size: 2em; +} +@x: blue; +@z: transparent; +@mix: none; + +.mixin { + @mix: #989; +} + +.tiny-scope { + color: @mix; // #989 + .mixin; +} + +.scope1 { + @y: orange; + @z: black; + color: @x; // blue + border-color: @z; // black + .hidden { + @x: #131313; + } + .scope2 { + @y: red; + color: @x; // blue + .scope3 { + @local: white; + color: @y; // red + border-color: @z; // black + background-color: @local; // white + } + } +}h1, h2, h3 { + a, p { + &:hover { + color: red; + } + } +} + +#all { color: blue; } +#the { color: blue; } +#same { color: blue; } + +ul, li, div, q, blockquote, textarea { + margin: 0; +} + +td { + margin: 0; + padding: 0; +} + +td, input { + line-height: 1em; +} +#strings { + background-image: url("http://son-of-a-banana.com"); + 
quotes: "~" "~"; + content: "#*%:&^,)!.(~*})"; + empty: ""; + brackets: "{" "}"; +} +#comments { + content: "/* hello */ // not-so-secret"; +} +#single-quote { + quotes: "'" "'"; + content: '""#!&""'; + empty: ''; +} +@a: 2; +@x: @a * @a; +@y: @x + 1; +@z: @x * 2 + @y; + +.variables { + width: @z + 1cm; // 14cm +} + +@b: @a * 10; +@c: #888; + +@fonts: "Trebuchet MS", Verdana, sans-serif; +@f: @fonts; + +@quotes: "~" "~"; +@q: @quotes; + +.variables { + height: @b + @x + 0px; // 24px + color: @c; + font-family: @f; + quotes: @q; +} + +.redefinition { + @var: 4; + @var: 2; + @var: 3; + three: @var; +} + +.values { + @a: 'Trebuchet'; + font-family: @a, @a, @a; +} + + +.whitespace + { color: white; } + +.whitespace +{ + color: white; +} + .whitespace +{ color: white; } + +.whitespace{color:white;} +.whitespace { color : white ; } + +.white, +.space, +.mania +{ color: white; } + +.no-semi-column { color: white } +.no-semi-column { + color: white; + white-space: pre +} +.no-semi-column {border: 2px solid white} +.newlines { + background: the, + great, + wall; + border: 2px + solid + black; +} +.empty { + +} +#yelow { + #short { + color: #fea; + } + #long { + color: #ffeeaa; + } + #rgba { + color: rgba(255, 238, 170, 0.1); + } +} + +#blue { + #short { + color: #00f; + } + #long { + color: #0000ff; + } + #rgba { + color: rgba(0, 0, 255, 0.1); + } +} + +#overflow { + .a { color: #111111 - #444444; } // #000000 + .b { color: #eee + #fff; } // #ffffff + .c { color: #aaa * 3; } // #ffffff + .d { color: #00ee00 + #009900; } // #00ff00 +} + +#grey { + color: rgb(200, 200, 200); +} + +#808080 { + color: hsl(50, 0%, 50%); +} + +#00ff00 { + color: hsl(120, 100%, 50%); +} +/******************\ +* * +* Comment Header * +* * +\******************/ + +/* + + Comment + +*/ + +/* + * Comment Test + * + * - cloudhead (http://cloudhead.net) + * + */ + +//////////////// +@var: "content"; +//////////////// + +/* Colors + * ------ + * #EDF8FC (background blue) + * #166C89 (darkest blue) + * + 
* Text: + * #333 (standard text) // A comment within a comment! + * #1F9EC9 (standard link) + * + */ + +/* @group Variables +------------------- */ +#comments /* boo */ { + /**/ // An empty comment + color: red; /* A C-style comment */ + background-color: orange; // A little comment + font-size: 12px; + + /* lost comment */ content: @var; + + border: 1px solid black; + + // padding & margin // + padding: 0; + margin: 2em; +} // + +/* commented out + #more-comments { + color: grey; + } +*/ + +#last { color: blue } +// +.comma-delimited { + background: url(bg.jpg) no-repeat, url(bg.png) repeat-x top left, url(bg); + text-shadow: -1px -1px 1px red, 6px 5px 5px yellow; + -moz-box-shadow: 0pt 0pt 2px rgba(255, 255, 255, 0.4) inset, + 0pt 4px 6px rgba(255, 255, 255, 0.4) inset; +} +@font-face { + font-family: Headline; + src: local(Futura-Medium), + url(fonts.svg#MyGeometricModern) format("svg"); +} +.other { + -moz-transform: translate(0, 11em) rotate(-90deg); +} +p:not([class*="lead"]) { + color: black; +} + +input[type="text"].class#id[attr=32]:not(1) { + color: white; +} + +div#id.class[a=1][b=2].class:not(1) { + color: white; +} + +ul.comma > li:not(:only-child)::after { + color: white; +} + +ol.comma > li:nth-last-child(2)::after { + color: white; +} + +li:nth-child(4n+1), +li:nth-child(-5n), +li:nth-child(-n+2) { + color: white; +} + +a[href^="http://"] { + color: black; +} + +a[href$="http://"] { + color: black; +} + +form[data-disabled] { + color: black; +} + +p::before { + color: black; +} +@charset "utf-8"; +div { color: black; } +div { width: 99%; } + +* { + min-width: 45em; +} + +h1, h2 > a > p, h3 { + color: none; +} + +div.class { + color: blue; +} + +div#id { + color: green; +} + +.class#id { + color: purple; +} + +.one.two.three { + color: grey; +} + +@media print { + font-size: 3em; +} + +@media screen { + font-size: 10px; +} + +@font-face { + font-family: 'Garamond Pro'; + src: url("/fonts/garamond-pro.ttf"); +} + +a:hover, a:link { + color: #999; +} + 
+p, p:first-child { + text-transform: none; +} + +q:lang(no) { + quotes: none; +} + +p + h1 { + font-size: 2.2em; +} + +#shorthands { + border: 1px solid #000; + font: 12px/16px Arial; + margin: 1px 0; + padding: 0 auto; + background: url("http://www.lesscss.org/spec.html") no-repeat 0 4px; +} + +#more-shorthands { + margin: 0; + padding: 1px 0 2px 0; + font: normal small/20px 'Trebuchet MS', Verdana, sans-serif; +} + +.misc { + -moz-border-radius: 2px; + display: -moz-inline-stack; + width: .1em; + background-color: #009998; + background-image: url(images/image.jpg); + background: -webkit-gradient(linear, left top, left bottom, from(red), to(blue)); + margin: ; +} + +#important { + color: red !important; + width: 100%!important; + height: 20px ! important; +} + +#functions { + @var: 10; + color: color("evil red"); // #660000 + width: increment(15); + height: undefined("self"); + border-width: add(2, 3); + variable: increment(@var); +} + +#built-in { + @r: 32; + escaped: e("-Some::weird(#thing, y)"); + lighten: lighten(#ff0000, 50%); + darken: darken(#ff0000, 50%); + saturate: saturate(#29332f, 20%); + desaturate: desaturate(#203c31, 20%); + greyscale: greyscale(#203c31); + format: %("rgb(%d, %d, %d)", @r, 128, 64); + format-string: %("hello %s", "world"); + eformat: e(%("rgb(%d, %d, %d)", @r, 128, 64)); +} + +@var: @a; +@a: 100%; + +.lazy-eval { + width: @var; +} +.mixin (@a: 1px, @b: 50%) { + width: @a * 5; + height: @b - 1%; +} + +.mixina (@style, @width, @color: black) { + border: @width @style @color; +} + +.mixiny +(@a: 0, @b: 0) { + margin: @a; + padding: @b; +} + +.hidden() { + color: transparent; +} + +.two-args { + color: blue; + .mixin(2px, 100%); + .mixina(dotted, 2px); +} + +.one-arg { + .mixin(3px); +} + +.no-parens { + .mixin; +} + +.no-args { + .mixin(); +} + +.var-args { + @var: 9; + .mixin(@var, @var * 2); +} + +.multi-mix { + .mixin(2px, 30%); + .mixiny(4, 5); +} + +.maxa(@arg1: 10, @arg2: #f00) { + padding: @arg1 * 2px; + color: @arg2; +} + 
+body { + .maxa(15); +} + +@glob: 5; +.global-mixin(@a:2) { + width: @glob + @a; +} + +.scope-mix { + .global-mixin(3); +} + +.nested-ruleset (@width: 200px) { + width: @width; + .column { margin: @width; } +} +.content { + .nested-ruleset(600px); +} + +// + +.same-var-name2(@radius) { + radius: @radius; +} +.same-var-name(@radius) { + .same-var-name2(@radius); +} +#same-var-name { + .same-var-name(5px); +} + +// + +.var-inside () { + @var: 10px; + width: @var; +} +#var-inside { .var-inside; } +.mix-inner (@var) { + border-width: @var; +} + +.mix (@a: 10) { + .inner { + height: @a * 10; + + .innest { + width: @a; + .mix-inner(@a * 2); + } + } +} + +.class { + .mix(30); +} +.mixin () { + zero: 0; +} +.mixin (@a: 1px) { + one: 1; +} +.mixin (@a) { + one-req: 1; +} +.mixin (@a: 1px, @b: 2px) { + two: 2; +} + +.mixin (@a, @b, @c) { + three-req: 3; +} + +.mixin (@a: 1px, @b: 2px, @c: 3px) { + three: 3; +} + +.zero { + .mixin(); +} + +.one { + .mixin(1); +} + +.two { + .mixin(1, 2); +} + +.three { + .mixin(1, 2, 3); +} + +// + +.mixout ('left') { + left: 1; +} + +.mixout ('right') { + right: 1; +} + +.left { + .mixout('left'); +} +.right { + .mixout('right'); +} + +// + +.border (@side, @width) { + color: black; + .border-side(@side, @width); +} +.border-side (left, @w) { + border-left: @w; +} +.border-side (right, @w) { + border-right: @w; +} + +.border-right { + .border(right, 4px); +} +.border-left { + .border(left, 4px); +} + +// + + +.border-radius (@r) { + both: @r * 10; +} +.border-radius (@r, left) { + left: @r; +} +.border-radius (@r, right) { + right: @r; +} + +.only-right { + .border-radius(33, right); +} +.only-left { + .border-radius(33, left); +} +.left-right { + .border-radius(33); +} +.mixin { border: 1px solid black; } +.mixout { border-color: orange; } +.borders { border-style: dashed; } + +#namespace { + .borders { + border-style: dotted; + } + .biohazard { + content: "death"; + .man { + color: transparent; + } + } +} +#theme { + > .mixin { + 
background-color: grey; + } +} +#container { + color: black; + .mixin; + .mixout; + #theme > .mixin; +} + +#header { + .milk { + color: white; + .mixin; + #theme > .mixin; + } + #cookie { + .chips { + #namespace .borders; + .calories { + #container; + } + } + .borders; + } +} +.secure-zone { #namespace .biohazard .man; } +.direct { + #namespace > .borders; +} +#operations { + color: #110000 + #000011 + #001100; // #111111 + height: 10px / 2px + 6px - 1px * 2; // 9px + width: 2 * 4 - 5em; // 3em + .spacing { + height: 10px / 2px+6px-1px*2; + width: 2 * 4-5em; + } + substraction: 20 - 10 - 5 - 5; // 0 + division: 20 / 5 / 4; // 1 +} + +@x: 4; +@y: 12em; + +.with-variables { + height: @x + @y; // 16em + width: 12 + @y; // 24em + size: 5cm - @x; // 1cm +} + +@z: -2; + +.negative { + height: 2px + @z; // 0px + width: 2px - @z; // 4px +} + +.shorthands { + padding: -1px 2px 0 -4px; // +} + +.colors { + color: #123; // #112233 + border-color: #234 + #111111; // #334455 + background-color: #222222 - #fff; // #000000 + .other { + color: 2 * #111; // #222222 + border-color: #333333 / 3 + #111; // #222222 + } +} +.parens { + @var: 1px; + border: (@var * 2) solid black; + margin: (@var * 1) (@var + 2) (4 * 4) 3; + width: (6 * 6); + padding: 2px (6px * 6px); +} + +.more-parens { + @var: (2 * 2); + padding: (2 * @var) 4 4 (@var * 1px); + width: (@var * @var) * 6; + height: (7 * 7) + (8 * 8); + margin: 4 * (5 + 5) / 2 - (@var * 2); + //margin: (6 * 6)px; +} + +.nested-parens { + width: 2 * (4 * (2 + (1 + 6))) - 1; + height: ((2+3)*(2+3) / (9-4)) + 1; +} + +.mixed-units { + margin: 2px 4em 1 5pc; + padding: (2px + 4px) 1em 2px 2; +} +#first > .one { + > #second .two > #deux { + width: 50%; + #third { + &:focus { + color: black; + #fifth { + > #sixth { + .seventh #eighth { + + #ninth { + color: purple; + } + } + } + } + } + height: 100%; + } + #fourth, #five, #six { + color: #110000; + .seven, .eight > #nine { + border: 1px solid black; + } + #ten { + color: red; + } + } + } + 
font-size: 2em; +} +@x: blue; +@z: transparent; +@mix: none; + +.mixin { + @mix: #989; +} + +.tiny-scope { + color: @mix; // #989 + .mixin; +} + +.scope1 { + @y: orange; + @z: black; + color: @x; // blue + border-color: @z; // black + .hidden { + @x: #131313; + } + .scope2 { + @y: red; + color: @x; // blue + .scope3 { + @local: white; + color: @y; // red + border-color: @z; // black + background-color: @local; // white + } + } +}h1, h2, h3 { + a, p { + &:hover { + color: red; + } + } +} + +#all { color: blue; } +#the { color: blue; } +#same { color: blue; } + +ul, li, div, q, blockquote, textarea { + margin: 0; +} + +td { + margin: 0; + padding: 0; +} + +td, input { + line-height: 1em; +} +#strings { + background-image: url("http://son-of-a-banana.com"); + quotes: "~" "~"; + content: "#*%:&^,)!.(~*})"; + empty: ""; + brackets: "{" "}"; +} +#comments { + content: "/* hello */ // not-so-secret"; +} +#single-quote { + quotes: "'" "'"; + content: '""#!&""'; + empty: ''; +} +@a: 2; +@x: @a * @a; +@y: @x + 1; +@z: @x * 2 + @y; + +.variables { + width: @z + 1cm; // 14cm +} + +@b: @a * 10; +@c: #888; + +@fonts: "Trebuchet MS", Verdana, sans-serif; +@f: @fonts; + +@quotes: "~" "~"; +@q: @quotes; + +.variables { + height: @b + @x + 0px; // 24px + color: @c; + font-family: @f; + quotes: @q; +} + +.redefinition { + @var: 4; + @var: 2; + @var: 3; + three: @var; +} + +.values { + @a: 'Trebuchet'; + font-family: @a, @a, @a; +} + + +.whitespace + { color: white; } + +.whitespace +{ + color: white; +} + .whitespace +{ color: white; } + +.whitespace{color:white;} +.whitespace { color : white ; } + +.white, +.space, +.mania +{ color: white; } + +.no-semi-column { color: white } +.no-semi-column { + color: white; + white-space: pre +} +.no-semi-column {border: 2px solid white} +.newlines { + background: the, + great, + wall; + border: 2px + solid + black; +} +.empty { + +} +#yelow { + #short { + color: #fea; + } + #long { + color: #ffeeaa; + } + #rgba { + color: rgba(255, 238, 170, 
0.1); + } +} + +#blue { + #short { + color: #00f; + } + #long { + color: #0000ff; + } + #rgba { + color: rgba(0, 0, 255, 0.1); + } +} + +#overflow { + .a { color: #111111 - #444444; } // #000000 + .b { color: #eee + #fff; } // #ffffff + .c { color: #aaa * 3; } // #ffffff + .d { color: #00ee00 + #009900; } // #00ff00 +} + +#grey { + color: rgb(200, 200, 200); +} + +#808080 { + color: hsl(50, 0%, 50%); +} + +#00ff00 { + color: hsl(120, 100%, 50%); +} +/******************\ +* * +* Comment Header * +* * +\******************/ + +/* + + Comment + +*/ + +/* + * Comment Test + * + * - cloudhead (http://cloudhead.net) + * + */ + +//////////////// +@var: "content"; +//////////////// + +/* Colors + * ------ + * #EDF8FC (background blue) + * #166C89 (darkest blue) + * + * Text: + * #333 (standard text) // A comment within a comment! + * #1F9EC9 (standard link) + * + */ + +/* @group Variables +------------------- */ +#comments /* boo */ { + /**/ // An empty comment + color: red; /* A C-style comment */ + background-color: orange; // A little comment + font-size: 12px; + + /* lost comment */ content: @var; + + border: 1px solid black; + + // padding & margin // + padding: 0; + margin: 2em; +} // + +/* commented out + #more-comments { + color: grey; + } +*/ + +#last { color: blue } +// +.comma-delimited { + background: url(bg.jpg) no-repeat, url(bg.png) repeat-x top left, url(bg); + text-shadow: -1px -1px 1px red, 6px 5px 5px yellow; + -moz-box-shadow: 0pt 0pt 2px rgba(255, 255, 255, 0.4) inset, + 0pt 4px 6px rgba(255, 255, 255, 0.4) inset; +} +@font-face { + font-family: Headline; + src: local(Futura-Medium), + url(fonts.svg#MyGeometricModern) format("svg"); +} +.other { + -moz-transform: translate(0, 11em) rotate(-90deg); +} +p:not([class*="lead"]) { + color: black; +} + +input[type="text"].class#id[attr=32]:not(1) { + color: white; +} + +div#id.class[a=1][b=2].class:not(1) { + color: white; +} + +ul.comma > li:not(:only-child)::after { + color: white; +} + +ol.comma > 
li:nth-last-child(2)::after { + color: white; +} + +li:nth-child(4n+1), +li:nth-child(-5n), +li:nth-child(-n+2) { + color: white; +} + +a[href^="http://"] { + color: black; +} + +a[href$="http://"] { + color: black; +} + +form[data-disabled] { + color: black; +} + +p::before { + color: black; +} +@charset "utf-8"; +div { color: black; } +div { width: 99%; } + +* { + min-width: 45em; +} + +h1, h2 > a > p, h3 { + color: none; +} + +div.class { + color: blue; +} + +div#id { + color: green; +} + +.class#id { + color: purple; +} + +.one.two.three { + color: grey; +} + +@media print { + font-size: 3em; +} + +@media screen { + font-size: 10px; +} + +@font-face { + font-family: 'Garamond Pro'; + src: url("/fonts/garamond-pro.ttf"); +} + +a:hover, a:link { + color: #999; +} + +p, p:first-child { + text-transform: none; +} + +q:lang(no) { + quotes: none; +} + +p + h1 { + font-size: 2.2em; +} + +#shorthands { + border: 1px solid #000; + font: 12px/16px Arial; + margin: 1px 0; + padding: 0 auto; + background: url("http://www.lesscss.org/spec.html") no-repeat 0 4px; +} + +#more-shorthands { + margin: 0; + padding: 1px 0 2px 0; + font: normal small/20px 'Trebuchet MS', Verdana, sans-serif; +} + +.misc { + -moz-border-radius: 2px; + display: -moz-inline-stack; + width: .1em; + background-color: #009998; + background-image: url(images/image.jpg); + background: -webkit-gradient(linear, left top, left bottom, from(red), to(blue)); + margin: ; +} + +#important { + color: red !important; + width: 100%!important; + height: 20px ! 
important; +} + +#functions { + @var: 10; + color: color("evil red"); // #660000 + width: increment(15); + height: undefined("self"); + border-width: add(2, 3); + variable: increment(@var); +} + +#built-in { + @r: 32; + escaped: e("-Some::weird(#thing, y)"); + lighten: lighten(#ff0000, 50%); + darken: darken(#ff0000, 50%); + saturate: saturate(#29332f, 20%); + desaturate: desaturate(#203c31, 20%); + greyscale: greyscale(#203c31); + format: %("rgb(%d, %d, %d)", @r, 128, 64); + format-string: %("hello %s", "world"); + eformat: e(%("rgb(%d, %d, %d)", @r, 128, 64)); +} + +@var: @a; +@a: 100%; + +.lazy-eval { + width: @var; +} +.mixin (@a: 1px, @b: 50%) { + width: @a * 5; + height: @b - 1%; +} + +.mixina (@style, @width, @color: black) { + border: @width @style @color; +} + +.mixiny +(@a: 0, @b: 0) { + margin: @a; + padding: @b; +} + +.hidden() { + color: transparent; +} + +.two-args { + color: blue; + .mixin(2px, 100%); + .mixina(dotted, 2px); +} + +.one-arg { + .mixin(3px); +} + +.no-parens { + .mixin; +} + +.no-args { + .mixin(); +} + +.var-args { + @var: 9; + .mixin(@var, @var * 2); +} + +.multi-mix { + .mixin(2px, 30%); + .mixiny(4, 5); +} + +.maxa(@arg1: 10, @arg2: #f00) { + padding: @arg1 * 2px; + color: @arg2; +} + +body { + .maxa(15); +} + +@glob: 5; +.global-mixin(@a:2) { + width: @glob + @a; +} + +.scope-mix { + .global-mixin(3); +} + +.nested-ruleset (@width: 200px) { + width: @width; + .column { margin: @width; } +} +.content { + .nested-ruleset(600px); +} + +// + +.same-var-name2(@radius) { + radius: @radius; +} +.same-var-name(@radius) { + .same-var-name2(@radius); +} +#same-var-name { + .same-var-name(5px); +} + +// + +.var-inside () { + @var: 10px; + width: @var; +} +#var-inside { .var-inside; } +.mix-inner (@var) { + border-width: @var; +} + +.mix (@a: 10) { + .inner { + height: @a * 10; + + .innest { + width: @a; + .mix-inner(@a * 2); + } + } +} + +.class { + .mix(30); +} +.mixin () { + zero: 0; +} +.mixin (@a: 1px) { + one: 1; +} +.mixin (@a) { + 
one-req: 1; +} +.mixin (@a: 1px, @b: 2px) { + two: 2; +} + +.mixin (@a, @b, @c) { + three-req: 3; +} + +.mixin (@a: 1px, @b: 2px, @c: 3px) { + three: 3; +} + +.zero { + .mixin(); +} + +.one { + .mixin(1); +} + +.two { + .mixin(1, 2); +} + +.three { + .mixin(1, 2, 3); +} + +// + +.mixout ('left') { + left: 1; +} diff --git a/src/dashboard/src/media/vendor/less.js/benchmark/less-benchmark.js b/src/dashboard/src/media/vendor/less.js/benchmark/less-benchmark.js new file mode 100644 index 0000000000..fe142c06c1 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/benchmark/less-benchmark.js @@ -0,0 +1,49 @@ +var path = require('path'), + fs = require('fs'), + sys = require('sys'); + +require.paths.unshift(__dirname, path.join(__dirname, '..')); + +var less = require('lib/less'); +var file = path.join(__dirname, 'benchmark.less'); + +if (process.argv[2]) { file = path.join(process.cwd(), process.argv[2]) } + +fs.readFile(file, 'utf8', function (e, data) { + var tree, css, start, end, total; + + sys.puts("Bechmarking...\n", path.basename(file) + " (" + + parseInt(data.length / 1024) + " KB)", ""); + + start = new(Date); + + new(less.Parser)({ optimization: 2 }).parse(data, function (err, tree) { + end = new(Date); + + total = end - start; + + sys.puts("Parsing: " + + total + " ms (" + + parseInt(1000 / total * + data.length / 1024) + " KB\/s)"); + + start = new(Date); + css = tree.toCSS(); + end = new(Date); + + sys.puts("Generation: " + (end - start) + " ms (" + + parseInt(1000 / (end - start) * + data.length / 1024) + " KB\/s)"); + + total += end - start; + + sys.puts("Total: " + total + "ms (" + + parseInt(1000 / total * data.length / 1024) + " KB/s)"); + + if (err) { + less.writeError(err); + process.exit(3); + } + }); +}); + diff --git a/src/dashboard/src/media/vendor/less.js/bin/lessc b/src/dashboard/src/media/vendor/less.js/bin/lessc new file mode 100644 index 0000000000..1e3c961ff2 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/bin/lessc @@ -0,0 
+1,130 @@ +#!/usr/bin/env node + +var path = require('path'), + fs = require('fs'), + sys = require('sys'); + +require.paths.unshift(path.join(__dirname, '..', 'lib')); + +var less = require('less'); +var args = process.argv.slice(1); +var options = { + compress: false, + optimization: 1, + silent: false, + paths: [], + color: true +}; + +args = args.filter(function (arg) { + var match; + + if (match = arg.match(/^-I(.+)$/)) { + options.paths.push(match[1]); + return false; + } + + if (match = arg.match(/^--?([a-z][0-9a-z-]*)(?:=([^\s]+))?$/i)) { arg = match[1] } + else { return arg } + + switch (arg) { + case 'v': + case 'version': + sys.puts("lessc " + less.version.join('.') + " (LESS Compiler) [JavaScript]"); + process.exit(0); + case 'verbose': + options.verbose = true; + break; + case 's': + case 'silent': + options.silent = true; + break; + case 'h': + case 'help': + sys.puts("usage: lessc source [destination]"); + process.exit(0); + case 'x': + case 'compress': + options.compress = true; + break; + case 'no-color': + options.color = false; + break; + case 'include-path': + options.paths = match[2].split(':') + .map(function(p) { + if (p && p[0] == '/') { + return path.join(path.dirname(input), p); + } else if (p) { + return path.join(process.cwd(), p); + } + }); + break; + case 'O0': options.optimization = 0; break; + case 'O1': options.optimization = 1; break; + case 'O2': options.optimization = 2; break; + } +}); + +var input = args[1]; +if (input && input[0] != '/' && input != '-') { + input = path.join(process.cwd(), input); +} +var output = args[2]; +if (output && output[0] != '/') { + output = path.join(process.cwd(), output); +} + +var css, fd, tree; + +if (! 
input) { + sys.puts("lessc: no input files"); + process.exit(1); +} + +var parseLessFile = function (e, data) { + if (e) { + sys.puts("lessc: " + e.message); + process.exit(1); + } + + new(less.Parser)({ + paths: [path.dirname(input)].concat(options.paths), + optimization: options.optimization, + filename: input + }).parse(data, function (err, tree) { + if (err) { + less.writeError(err, options); + process.exit(1); + } else { + try { + css = tree.toCSS({ compress: options.compress }); + if (output) { + fd = fs.openSync(output, "w"); + fs.writeSync(fd, css, 0, "utf8"); + } else { + sys.print(css); + } + } catch (e) { + less.writeError(e, options); + process.exit(2); + } + } + }); +}; + +if (input != '-') { + fs.readFile(input, 'utf-8', parseLessFile); +} else { + process.stdin.resume(); + process.stdin.setEncoding('utf8'); + + var buffer = ''; + process.stdin.on('data', function(data) { + buffer += data; + }); + + process.stdin.on('end', function() { + parseLessFile(false, buffer); + }); +} diff --git a/src/dashboard/src/media/vendor/less.js/build/ecma-5.js b/src/dashboard/src/media/vendor/less.js/build/ecma-5.js new file mode 100644 index 0000000000..420bd88594 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/build/ecma-5.js @@ -0,0 +1,120 @@ + +// ecma-5.js +// +// -- kriskowal Kris Kowal Copyright (C) 2009-2010 MIT License +// -- tlrobinson Tom Robinson +// dantman Daniel Friesen + +// +// Array +// +if (!Array.isArray) { + Array.isArray = function(obj) { + return Object.prototype.toString.call(obj) === "[object Array]" || + (obj instanceof Array); + }; +} +if (!Array.prototype.forEach) { + Array.prototype.forEach = function(block, thisObject) { + var len = this.length >>> 0; + for (var i = 0; i < len; i++) { + if (i in this) { + block.call(thisObject, this[i], i, this); + } + } + }; +} +if (!Array.prototype.map) { + Array.prototype.map = function(fun /*, thisp*/) { + var len = this.length >>> 0; + var res = new Array(len); + var thisp = arguments[1]; + 
+ for (var i = 0; i < len; i++) { + if (i in this) { + res[i] = fun.call(thisp, this[i], i, this); + } + } + return res; + }; +} +if (!Array.prototype.filter) { + Array.prototype.filter = function (block /*, thisp */) { + var values = []; + var thisp = arguments[1]; + for (var i = 0; i < this.length; i++) { + if (block.call(thisp, this[i])) { + values.push(this[i]); + } + } + return values; + }; +} +if (!Array.prototype.reduce) { + Array.prototype.reduce = function(fun /*, initial*/) { + var len = this.length >>> 0; + var i = 0; + + // no value to return if no initial value and an empty array + if (len === 0 && arguments.length === 1) throw new TypeError(); + + if (arguments.length >= 2) { + var rv = arguments[1]; + } else { + do { + if (i in this) { + rv = this[i++]; + break; + } + // if array contains no values, no initial value to return + if (++i >= len) throw new TypeError(); + } while (true); + } + for (; i < len; i++) { + if (i in this) { + rv = fun.call(null, rv, this[i], i, this); + } + } + return rv; + }; +} +if (!Array.prototype.indexOf) { + Array.prototype.indexOf = function (value /*, fromIndex */ ) { + var length = this.length; + var i = arguments[1] || 0; + + if (!length) return -1; + if (i >= length) return -1; + if (i < 0) i += length; + + for (; i < length; i++) { + if (!Object.prototype.hasOwnProperty.call(this, i)) { continue } + if (value === this[i]) return i; + } + return -1; + }; +} + +// +// Object +// +if (!Object.keys) { + Object.keys = function (object) { + var keys = []; + for (var name in object) { + if (Object.prototype.hasOwnProperty.call(object, name)) { + keys.push(name); + } + } + return keys; + }; +} + +// +// String +// +if (!String.prototype.trim) { + String.prototype.trim = function () { + return String(this).replace(/^\s\s*/, '').replace(/\s\s*$/, ''); + }; +} diff --git a/src/dashboard/src/media/vendor/less.js/build/header.js b/src/dashboard/src/media/vendor/less.js/build/header.js new file mode 100644 index 
0000000000..c491d9291e --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/build/header.js @@ -0,0 +1,7 @@ +// +// LESS - Leaner CSS v@VERSION +// http://lesscss.org +// +// Copyright (c) 2009-2011, Alexis Sellier +// Licensed under the Apache 2.0 License. +// diff --git a/src/dashboard/src/media/vendor/less.js/build/require-rhino.js b/src/dashboard/src/media/vendor/less.js/build/require-rhino.js new file mode 100644 index 0000000000..02bc83ce1c --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/build/require-rhino.js @@ -0,0 +1,7 @@ +// +// Stub out `require` in rhino +// +function require(arg) { + return less[arg.split('/')[1]]; +}; + diff --git a/src/dashboard/src/media/vendor/less.js/build/require.js b/src/dashboard/src/media/vendor/less.js/build/require.js new file mode 100644 index 0000000000..4d5b1720fc --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/build/require.js @@ -0,0 +1,7 @@ +// +// Stub out `require` in the browser +// +function require(arg) { + return window.less[arg.split('/')[1]]; +}; + diff --git a/src/dashboard/src/media/vendor/less.js/dist/less-1.1.0.js b/src/dashboard/src/media/vendor/less.js/dist/less-1.1.0.js new file mode 100644 index 0000000000..487c06acd2 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/dist/less-1.1.0.js @@ -0,0 +1,2695 @@ +// +// LESS - Leaner CSS v1.1.0 +// http://lesscss.org +// +// Copyright (c) 2009-2011, Alexis Sellier +// Licensed under the Apache 2.0 License. 
+// +(function (window, undefined) { +// +// Stub out `require` in the browser +// +function require(arg) { + return window.less[arg.split('/')[1]]; +}; + + +// ecma-5.js +// +// -- kriskowal Kris Kowal Copyright (C) 2009-2010 MIT License +// -- tlrobinson Tom Robinson +// dantman Daniel Friesen + +// +// Array +// +if (!Array.isArray) { + Array.isArray = function(obj) { + return Object.prototype.toString.call(obj) === "[object Array]" || + (obj instanceof Array); + }; +} +if (!Array.prototype.forEach) { + Array.prototype.forEach = function(block, thisObject) { + var len = this.length >>> 0; + for (var i = 0; i < len; i++) { + if (i in this) { + block.call(thisObject, this[i], i, this); + } + } + }; +} +if (!Array.prototype.map) { + Array.prototype.map = function(fun /*, thisp*/) { + var len = this.length >>> 0; + var res = new Array(len); + var thisp = arguments[1]; + + for (var i = 0; i < len; i++) { + if (i in this) { + res[i] = fun.call(thisp, this[i], i, this); + } + } + return res; + }; +} +if (!Array.prototype.filter) { + Array.prototype.filter = function (block /*, thisp */) { + var values = []; + var thisp = arguments[1]; + for (var i = 0; i < this.length; i++) { + if (block.call(thisp, this[i])) { + values.push(this[i]); + } + } + return values; + }; +} +if (!Array.prototype.reduce) { + Array.prototype.reduce = function(fun /*, initial*/) { + var len = this.length >>> 0; + var i = 0; + + // no value to return if no initial value and an empty array + if (len === 0 && arguments.length === 1) throw new TypeError(); + + if (arguments.length >= 2) { + var rv = arguments[1]; + } else { + do { + if (i in this) { + rv = this[i++]; + break; + } + // if array contains no values, no initial value to return + if (++i >= len) throw new TypeError(); + } while (true); + } + for (; i < len; i++) { + if (i in this) { + rv = fun.call(null, rv, this[i], i, this); + } + } + return rv; + }; +} +if (!Array.prototype.indexOf) { + Array.prototype.indexOf = function (value /*, 
fromIndex */ ) { + var length = this.length; + var i = arguments[1] || 0; + + if (!length) return -1; + if (i >= length) return -1; + if (i < 0) i += length; + + for (; i < length; i++) { + if (!Object.prototype.hasOwnProperty.call(this, i)) { continue } + if (value === this[i]) return i; + } + return -1; + }; +} + +// +// Object +// +if (!Object.keys) { + Object.keys = function (object) { + var keys = []; + for (var name in object) { + if (Object.prototype.hasOwnProperty.call(object, name)) { + keys.push(name); + } + } + return keys; + }; +} + +// +// String +// +if (!String.prototype.trim) { + String.prototype.trim = function () { + return String(this).replace(/^\s\s*/, '').replace(/\s\s*$/, ''); + }; +} +var less, tree; + +if (typeof(window) === 'undefined') { + less = exports, + tree = require('less/tree'); +} else { + if (typeof(window.less) === 'undefined') { window.less = {} } + less = window.less, + tree = window.less.tree = {}; +} +// +// less.js - parser +// +// A relatively straight-forward predictive parser. +// There is no tokenization/lexing stage, the input is parsed +// in one sweep. +// +// To make the parser fast enough to run in the browser, several +// optimization had to be made: +// +// - Matching and slicing on a huge input is often cause of slowdowns. +// The solution is to chunkify the input into smaller strings. +// The chunks are stored in the `chunks` var, +// `j` holds the current chunk index, and `current` holds +// the index of the current chunk in relation to `input`. +// This gives us an almost 4x speed-up. +// +// - In many cases, we don't need to match individual tokens; +// for example, if a value doesn't hold any variables, operations +// or dynamic references, the parser can effectively 'skip' it, +// treating it as a literal. +// An example would be '1px solid #000' - which evaluates to itself, +// we don't need to know what the individual components are. 
+// The drawback, of course is that you don't get the benefits of +// syntax-checking on the CSS. This gives us a 50% speed-up in the parser, +// and a smaller speed-up in the code-gen. +// +// +// Token matching is done with the `$` function, which either takes +// a terminal string or regexp, or a non-terminal function to call. +// It also takes care of moving all the indices forwards. +// +// +less.Parser = function Parser(env) { + var input, // LeSS input string + i, // current index in `input` + j, // current chunk + temp, // temporarily holds a chunk's state, for backtracking + memo, // temporarily holds `i`, when backtracking + furthest, // furthest index the parser has gone to + chunks, // chunkified input + current, // index of current chunk, in `input` + parser; + + var that = this; + + // This function is called after all files + // have been imported through `@import`. + var finish = function () {}; + + var imports = this.imports = { + paths: env && env.paths || [], // Search paths, when importing + queue: [], // Files which haven't been imported yet + files: {}, // Holds the imported parse trees + mime: env && env.mime, // MIME type of .less files + push: function (path, callback) { + var that = this; + this.queue.push(path); + + // + // Import a file asynchronously + // + less.Parser.importer(path, this.paths, function (root) { + that.queue.splice(that.queue.indexOf(path), 1); // Remove the path from the queue + that.files[path] = root; // Store the root + + callback(root); + + if (that.queue.length === 0) { finish() } // Call `finish` if we're done importing + }, env); + } + }; + + function save() { temp = chunks[j], memo = i, current = i } + function restore() { chunks[j] = temp, i = memo, current = i } + + function sync() { + if (i > current) { + chunks[j] = chunks[j].slice(i - current); + current = i; + } + } + // + // Parse from a token, regexp or string, and move forward if match + // + function $(tok) { + var match, args, length, c, index, 
endIndex, k, mem; + + // + // Non-terminal + // + if (tok instanceof Function) { + return tok.call(parser.parsers); + // + // Terminal + // + // Either match a single character in the input, + // or match a regexp in the current chunk (chunk[j]). + // + } else if (typeof(tok) === 'string') { + match = input.charAt(i) === tok ? tok : null; + length = 1; + sync (); + } else { + sync (); + + if (match = tok.exec(chunks[j])) { + length = match[0].length; + } else { + return null; + } + } + + // The match is confirmed, add the match length to `i`, + // and consume any extra white-space characters (' ' || '\n') + // which come after that. The reason for this is that LeSS's + // grammar is mostly white-space insensitive. + // + if (match) { + mem = i += length; + endIndex = i + chunks[j].length - length; + + while (i < endIndex) { + c = input.charCodeAt(i); + if (! (c === 32 || c === 10 || c === 9)) { break } + i++; + } + chunks[j] = chunks[j].slice(length + (i - mem)); + current = i; + + if (chunks[j].length === 0 && j < chunks.length - 1) { j++ } + + if(typeof(match) === 'string') { + return match; + } else { + return match.length === 1 ? match[0] : match; + } + } + } + + // Same as $(), but don't change the state of the parser, + // just return the match. + function peek(tok) { + if (typeof(tok) === 'string') { + return input.charAt(i) === tok; + } else { + if (tok.test(chunks[j])) { + return true; + } else { + return false; + } + } + } + + this.env = env = env || {}; + + // The optimization level dictates the thoroughness of the parser, + // the lower the number, the less nodes it will create in the tree. + // This could matter for debugging, or if you want to access + // the individual nodes in the tree. + this.optimization = ('optimization' in this.env) ? 
this.env.optimization : 1; + + this.env.filename = this.env.filename || null; + + // + // The Parser + // + return parser = { + + imports: imports, + // + // Parse an input string into an abstract syntax tree, + // call `callback` when done. + // + parse: function (str, callback) { + var root, start, end, zone, line, lines, buff = [], c, error = null; + + i = j = current = furthest = 0; + chunks = []; + input = str.replace(/\r\n/g, '\n'); + + // Split the input into chunks. + chunks = (function (chunks) { + var j = 0, + skip = /[^"'`\{\}\/\(\)]+/g, + comment = /\/\*(?:[^*]|\*+[^\/*])*\*+\/|\/\/.*/g, + level = 0, + match, + chunk = chunks[0], + inParam, + inString; + + for (var i = 0, c, cc; i < input.length; i++) { + skip.lastIndex = i; + if (match = skip.exec(input)) { + if (match.index === i) { + i += match[0].length; + chunk.push(match[0]); + } + } + c = input.charAt(i); + comment.lastIndex = i; + + if (!inString && !inParam && c === '/') { + cc = input.charAt(i + 1); + if (cc === '/' || cc === '*') { + if (match = comment.exec(input)) { + if (match.index === i) { + i += match[0].length; + chunk.push(match[0]); + c = input.charAt(i); + } + } + } + } + + if (c === '{' && !inString && !inParam) { level ++; + chunk.push(c); + } else if (c === '}' && !inString && !inParam) { level --; + chunk.push(c); + chunks[++j] = chunk = []; + } else if (c === '(' && !inString && !inParam) { + chunk.push(c); + inParam = true; + } else if (c === ')' && !inString && inParam) { + chunk.push(c); + inParam = false; + } else { + if (c === '"' || c === "'" || c === '`') { + if (! inString) { + inString = c; + } else { + inString = inString === c ? false : inString; + } + } + chunk.push(c); + } + } + if (level > 0) { + throw { + type: 'Syntax', + message: "Missing closing `}`", + filename: env.filename + }; + } + + return chunks.map(function (c) { return c.join('') });; + })([[]]); + + // Start with the primary rule. 
+ // The whole syntax tree is held under a Ruleset node, + // with the `root` property set to true, so no `{}` are + // output. The callback is called when the input is parsed. + root = new(tree.Ruleset)([], $(this.parsers.primary)); + root.root = true; + + root.toCSS = (function (evaluate) { + var line, lines, column; + + return function (options, variables) { + var frames = []; + + options = options || {}; + // + // Allows setting variables with a hash, so: + // + // `{ color: new(tree.Color)('#f01') }` will become: + // + // new(tree.Rule)('@color', + // new(tree.Value)([ + // new(tree.Expression)([ + // new(tree.Color)('#f01') + // ]) + // ]) + // ) + // + if (typeof(variables) === 'object' && !Array.isArray(variables)) { + variables = Object.keys(variables).map(function (k) { + var value = variables[k]; + + if (! (value instanceof tree.Value)) { + if (! (value instanceof tree.Expression)) { + value = new(tree.Expression)([value]); + } + value = new(tree.Value)([value]); + } + return new(tree.Rule)('@' + k, value, false, 0); + }); + frames = [new(tree.Ruleset)(null, variables)]; + } + + try { + var css = evaluate.call(this, { frames: frames }) + .toCSS([], { compress: options.compress || false }); + } catch (e) { + lines = input.split('\n'); + line = getLine(e.index); + + for (var n = e.index, column = -1; + n >= 0 && input.charAt(n) !== '\n'; + n--) { column++ } + + throw { + type: e.type, + message: e.message, + filename: env.filename, + index: e.index, + line: typeof(line) === 'number' ? line + 1 : null, + callLine: e.call && (getLine(e.call) + 1), + callExtract: lines[getLine(e.call)], + stack: e.stack, + column: column, + extract: [ + lines[line - 1], + lines[line], + lines[line + 1] + ] + }; + } + if (options.compress) { + return css.replace(/(\s)+/g, "$1"); + } else { + return css; + } + + function getLine(index) { + return index ? 
(input.slice(0, index).match(/\n/g) || "").length : null; + } + }; + })(root.eval); + + // If `i` is smaller than the `input.length - 1`, + // it means the parser wasn't able to parse the whole + // string, so we've got a parsing error. + // + // We try to extract a \n delimited string, + // showing the line where the parse error occured. + // We split it up into two parts (the part which parsed, + // and the part which didn't), so we can color them differently. + if (i < input.length - 1) { + i = furthest; + lines = input.split('\n'); + line = (input.slice(0, i).match(/\n/g) || "").length + 1; + + for (var n = i, column = -1; n >= 0 && input.charAt(n) !== '\n'; n--) { column++ } + + error = { + name: "ParseError", + message: "Syntax Error on line " + line, + index: i, + filename: env.filename, + line: line, + column: column, + extract: [ + lines[line - 2], + lines[line - 1], + lines[line] + ] + }; + } + + if (this.imports.queue.length > 0) { + finish = function () { callback(error, root) }; + } else { + callback(error, root); + } + }, + + // + // Here in, the parsing rules/functions + // + // The basic structure of the syntax tree generated is as follows: + // + // Ruleset -> Rule -> Value -> Expression -> Entity + // + // Here's some LESS code: + // + // .class { + // color: #fff; + // border: 1px solid #000; + // width: @w + 4px; + // > .child {...} + // } + // + // And here's what the parse tree might look like: + // + // Ruleset (Selector '.class', [ + // Rule ("color", Value ([Expression [Color #fff]])) + // Rule ("border", Value ([Expression [Dimension 1px][Keyword "solid"][Color #000]])) + // Rule ("width", Value ([Expression [Operation "+" [Variable "@w"][Dimension 4px]]])) + // Ruleset (Selector [Element '>', '.child'], [...]) + // ]) + // + // In general, most rules will try to parse a token with the `$()` function, and if the return + // value is truly, will return a new node, of the relevant type. 
Sometimes, we need to check + // first, before parsing, that's when we use `peek()`. + // + parsers: { + // + // The `primary` rule is the *entry* and *exit* point of the parser. + // The rules here can appear at any level of the parse tree. + // + // The recursive nature of the grammar is an interplay between the `block` + // rule, which represents `{ ... }`, the `ruleset` rule, and this `primary` rule, + // as represented by this simplified grammar: + // + // primary → (ruleset | rule)+ + // ruleset → selector+ block + // block → '{' primary '}' + // + // Only at one point is the primary rule not called from the + // block rule: at the root level. + // + primary: function () { + var node, root = []; + + while ((node = $(this.mixin.definition) || $(this.rule) || $(this.ruleset) || + $(this.mixin.call) || $(this.comment) || $(this.directive)) + || $(/^[\s\n]+/)) { + node && root.push(node); + } + return root; + }, + + // We create a Comment node for CSS comments `/* */`, + // but keep the LeSS comments `//` silent, by just skipping + // over them. + comment: function () { + var comment; + + if (input.charAt(i) !== '/') return; + + if (input.charAt(i + 1) === '/') { + return new(tree.Comment)($(/^\/\/.*/), true); + } else if (comment = $(/^\/\*(?:[^*]|\*+[^\/*])*\*+\/\n?/)) { + return new(tree.Comment)(comment); + } + }, + + // + // Entities are tokens which can be found inside an Expression + // + entities: { + // + // A string, which supports escaping " and ' + // + // "milky way" 'he\'s the one!' 
+ // + quoted: function () { + var str, j = i, e; + + if (input.charAt(j) === '~') { j++, e = true } // Escaped strings + if (input.charAt(j) !== '"' && input.charAt(j) !== "'") return; + + e && $('~'); + + if (str = $(/^"((?:[^"\\\r\n]|\\.)*)"|'((?:[^'\\\r\n]|\\.)*)'/)) { + return new(tree.Quoted)(str[0], str[1] || str[2], e); + } + }, + + // + // A catch-all word, such as: + // + // black border-collapse + // + keyword: function () { + var k; + if (k = $(/^[A-Za-z-]+/)) { return new(tree.Keyword)(k) } + }, + + // + // A function call + // + // rgb(255, 0, 255) + // + // We also try to catch IE's `alpha()`, but let the `alpha` parser + // deal with the details. + // + // The arguments are parsed with the `entities.arguments` parser. + // + call: function () { + var name, args; + + if (! (name = /^([\w-]+|%)\(/.exec(chunks[j]))) return; + + name = name[1].toLowerCase(); + + if (name === 'url') { return null } + else { i += name.length } + + if (name === 'alpha') { return $(this.alpha) } + + $('('); // Parse the '(' and consume whitespace. + + args = $(this.entities.arguments); + + if (! $(')')) return; + + if (name) { return new(tree.Call)(name, args) } + }, + arguments: function () { + var args = [], arg; + + while (arg = $(this.expression)) { + args.push(arg); + if (! $(',')) { break } + } + return args; + }, + literal: function () { + return $(this.entities.dimension) || + $(this.entities.color) || + $(this.entities.quoted); + }, + + // + // Parse url() tokens + // + // We use a specific rule for urls, because they don't really behave like + // standard function calls. The difference is that the argument doesn't have + // to be enclosed within a string, so it can't be parsed as an Expression. + // + url: function () { + var value; + + if (input.charAt(i) !== 'u' || !$(/^url\(/)) return; + value = $(this.entities.quoted) || $(this.entities.variable) || + $(this.entities.dataURI) || $(/^[-\w%@$\/.&=:;#+?~]+/) || ""; + if (! 
$(')')) throw new(Error)("missing closing ) for url()"); + + return new(tree.URL)((value.value || value.data || value instanceof tree.Variable) + ? value : new(tree.Anonymous)(value), imports.paths); + }, + + dataURI: function () { + var obj; + + if ($(/^data:/)) { + obj = {}; + obj.mime = $(/^[^\/]+\/[^,;)]+/) || ''; + obj.charset = $(/^;\s*charset=[^,;)]+/) || ''; + obj.base64 = $(/^;\s*base64/) || ''; + obj.data = $(/^,\s*[^)]+/); + + if (obj.data) { return obj } + } + }, + + // + // A Variable entity, such as `@fink`, in + // + // width: @fink + 2px + // + // We use a different parser for variable definitions, + // see `parsers.variable`. + // + variable: function () { + var name, index = i; + + if (input.charAt(i) === '@' && (name = $(/^@@?[\w-]+/))) { + return new(tree.Variable)(name, index); + } + }, + + // + // A Hexadecimal color + // + // #4F3C2F + // + // `rgb` and `hsl` colors are parsed through the `entities.call` parser. + // + color: function () { + var rgb; + + if (input.charAt(i) === '#' && (rgb = $(/^#([a-fA-F0-9]{6}|[a-fA-F0-9]{3})/))) { + return new(tree.Color)(rgb[1]); + } + }, + + // + // A Dimension, that is, a number and a unit + // + // 0.5em 95% + // + dimension: function () { + var value, c = input.charCodeAt(i); + if ((c > 57 || c < 45) || c === 47) return; + + if (value = $(/^(-?\d*\.?\d+)(px|%|em|pc|ex|in|deg|s|ms|pt|cm|mm|rad|grad|turn)?/)) { + return new(tree.Dimension)(value[1], value[2]); + } + }, + + // + // JavaScript code to be evaluated + // + // `window.location.href` + // + javascript: function () { + var str, j = i, e; + + if (input.charAt(j) === '~') { j++, e = true } // Escaped strings + if (input.charAt(j) !== '`') { return } + + e && $('~'); + + if (str = $(/^`([^`]*)`/)) { + return new(tree.JavaScript)(str[1], i, e); + } + } + }, + + // + // The variable part of a variable definition. 
Used in the `rule` parser + // + // @fink: + // + variable: function () { + var name; + + if (input.charAt(i) === '@' && (name = $(/^(@[\w-]+)\s*:/))) { return name[1] } + }, + + // + // A font size/line-height shorthand + // + // small/12px + // + // We need to peek first, or we'll match on keywords and dimensions + // + shorthand: function () { + var a, b; + + if (! peek(/^[@\w.%-]+\/[@\w.-]+/)) return; + + if ((a = $(this.entity)) && $('/') && (b = $(this.entity))) { + return new(tree.Shorthand)(a, b); + } + }, + + // + // Mixins + // + mixin: { + // + // A Mixin call, with an optional argument list + // + // #mixins > .square(#fff); + // .rounded(4px, black); + // .button; + // + // The `while` loop is there because mixins can be + // namespaced, but we only support the child and descendant + // selector for now. + // + call: function () { + var elements = [], e, c, args, index = i, s = input.charAt(i); + + if (s !== '.' && s !== '#') { return } + + while (e = $(/^[#.](?:[\w-]|\\(?:[a-fA-F0-9]{1,6} ?|[^a-fA-F0-9]))+/)) { + elements.push(new(tree.Element)(c, e)); + c = $('>'); + } + $('(') && (args = $(this.entities.arguments)) && $(')'); + + if (elements.length > 0 && ($(';') || peek('}'))) { + return new(tree.mixin.Call)(elements, args, index); + } + }, + + // + // A Mixin definition, with a list of parameters + // + // .rounded (@radius: 2px, @color) { + // ... + // } + // + // Until we have a finer grained state-machine, we have to + // do a look-ahead, to make sure we don't have a mixin call. + // See the `rule` function for more information. + // + // We start by matching `.rounded (`, and then proceed on to + // the argument list, which has optional default values. + // We store the parameters in `params`, with a `value` key, + // if there is a value, such as in the case of `@radius`. + // + // Once we've got our params list, and a closing `)`, we parse + // the `{...}` block. 
+ // + definition: function () { + var name, params = [], match, ruleset, param, value; + + if ((input.charAt(i) !== '.' && input.charAt(i) !== '#') || + peek(/^[^{]*(;|})/)) return; + + if (match = $(/^([#.](?:[\w-]|\\(?:[a-fA-F0-9]{1,6} ?|[^a-fA-F0-9]))+)\s*\(/)) { + name = match[1]; + + while (param = $(this.entities.variable) || $(this.entities.literal) + || $(this.entities.keyword)) { + // Variable + if (param instanceof tree.Variable) { + if ($(':')) { + if (value = $(this.expression)) { + params.push({ name: param.name, value: value }); + } else { + throw new(Error)("Expected value"); + } + } else { + params.push({ name: param.name }); + } + } else { + params.push({ value: param }); + } + if (! $(',')) { break } + } + if (! $(')')) throw new(Error)("Expected )"); + + ruleset = $(this.block); + + if (ruleset) { + return new(tree.mixin.Definition)(name, params, ruleset); + } + } + } + }, + + // + // Entities are the smallest recognized token, + // and can be found inside a rule's value. + // + entity: function () { + return $(this.entities.literal) || $(this.entities.variable) || $(this.entities.url) || + $(this.entities.call) || $(this.entities.keyword) || $(this.entities.javascript) || + $(this.comment); + }, + + // + // A Rule terminator. Note that we use `peek()` to check for '}', + // because the `block` rule will be expecting it, but we still need to make sure + // it's there, if ';' was ommitted. + // + end: function () { + return $(';') || peek('}'); + }, + + // + // IE's alpha function + // + // alpha(opacity=88) + // + alpha: function () { + var value; + + if (! $(/^opacity=/i)) return; + if (value = $(/^\d+/) || $(this.entities.variable)) { + if (! 
$(')')) throw new(Error)("missing closing ) for alpha()"); + return new(tree.Alpha)(value); + } + }, + + // + // A Selector Element + // + // div + // + h1 + // #socks + // input[type="text"] + // + // Elements are the building blocks for Selectors, + // they are made out of a `Combinator` (see combinator rule), + // and an element name, such as a tag a class, or `*`. + // + element: function () { + var e, t, c; + + c = $(this.combinator); + e = $(/^(?:[.#]?|:*)(?:[\w-]|\\(?:[a-fA-F0-9]{1,6} ?|[^a-fA-F0-9]))+/) || $('*') || $(this.attribute) || $(/^\([^)@]+\)/); + + if (e) { return new(tree.Element)(c, e) } + }, + + // + // Combinators combine elements together, in a Selector. + // + // Because our parser isn't white-space sensitive, special care + // has to be taken, when parsing the descendant combinator, ` `, + // as it's an empty space. We have to check the previous character + // in the input, to see if it's a ` ` character. More info on how + // we deal with this in *combinator.js*. + // + combinator: function () { + var match, c = input.charAt(i); + + if (c === '>' || c === '&' || c === '+' || c === '~') { + i++; + while (input.charAt(i) === ' ') { i++ } + return new(tree.Combinator)(c); + } else if (c === ':' && input.charAt(i + 1) === ':') { + i += 2; + while (input.charAt(i) === ' ') { i++ } + return new(tree.Combinator)('::'); + } else if (input.charAt(i - 1) === ' ') { + return new(tree.Combinator)(" "); + } else { + return new(tree.Combinator)(null); + } + }, + + // + // A CSS Selector + // + // .class > div + h1 + // li a:hover + // + // Selectors are made out of one or more Elements, see above. 
+ // + selector: function () { + var sel, e, elements = [], c, match; + + while (e = $(this.element)) { + c = input.charAt(i); + elements.push(e) + if (c === '{' || c === '}' || c === ';' || c === ',') { break } + } + + if (elements.length > 0) { return new(tree.Selector)(elements) } + }, + tag: function () { + return $(/^[a-zA-Z][a-zA-Z-]*[0-9]?/) || $('*'); + }, + attribute: function () { + var attr = '', key, val, op; + + if (! $('[')) return; + + if (key = $(/^[a-zA-Z-]+/) || $(this.entities.quoted)) { + if ((op = $(/^[|~*$^]?=/)) && + (val = $(this.entities.quoted) || $(/^[\w-]+/))) { + attr = [key, op, val.toCSS ? val.toCSS() : val].join(''); + } else { attr = key } + } + + if (! $(']')) return; + + if (attr) { return "[" + attr + "]" } + }, + + // + // The `block` rule is used by `ruleset` and `mixin.definition`. + // It's a wrapper around the `primary` rule, with added `{}`. + // + block: function () { + var content; + + if ($('{') && (content = $(this.primary)) && $('}')) { + return content; + } + }, + + // + // div, .class, body > p {...} + // + ruleset: function () { + var selectors = [], s, rules, match; + save(); + + if (match = /^([.#: \w-]+)[\s\n]*\{/.exec(chunks[j])) { + i += match[0].length - 1; + selectors = [new(tree.Selector)([new(tree.Element)(null, match[1])])]; + } else { + while (s = $(this.selector)) { + selectors.push(s); + $(this.comment); + if (! $(',')) { break } + $(this.comment); + } + } + + if (selectors.length > 0 && (rules = $(this.block))) { + return new(tree.Ruleset)(selectors, rules); + } else { + // Backtrack + furthest = i; + restore(); + } + }, + rule: function () { + var name, value, c = input.charAt(i), important, match; + save(); + + if (c === '.' 
|| c === '#' || c === '&') { return } + + if (name = $(this.variable) || $(this.property)) { + if ((name.charAt(0) != '@') && (match = /^([^@+\/'"*`(;{}-]*);/.exec(chunks[j]))) { + i += match[0].length - 1; + value = new(tree.Anonymous)(match[1]); + } else if (name === "font") { + value = $(this.font); + } else { + value = $(this.value); + } + important = $(this.important); + + if (value && $(this.end)) { + return new(tree.Rule)(name, value, important, memo); + } else { + furthest = i; + restore(); + } + } + }, + + // + // An @import directive + // + // @import "lib"; + // + // Depending on our environemnt, importing is done differently: + // In the browser, it's an XHR request, in Node, it would be a + // file-system operation. The function used for importing is + // stored in `import`, which we pass to the Import constructor. + // + "import": function () { + var path; + if ($(/^@import\s+/) && + (path = $(this.entities.quoted) || $(this.entities.url)) && + $(';')) { + return new(tree.Import)(path, imports); + } + }, + + // + // A CSS Directive + // + // @charset "utf-8"; + // + directive: function () { + var name, value, rules, types; + + if (input.charAt(i) !== '@') return; + + if (value = $(this['import'])) { + return value; + } else if (name = $(/^@media|@page|@-[-a-z]+/)) { + types = ($(/^[^{]+/) || '').trim(); + if (rules = $(this.block)) { + return new(tree.Directive)(name + " " + types, rules); + } + } else if (name = $(/^@[-a-z]+/)) { + if (name === '@font-face') { + if (rules = $(this.block)) { + return new(tree.Directive)(name, rules); + } + } else if ((value = $(this.entity)) && $(';')) { + return new(tree.Directive)(name, value); + } + } + }, + font: function () { + var value = [], expression = [], weight, shorthand, font, e; + + while (e = $(this.shorthand) || $(this.entity)) { + expression.push(e); + } + value.push(new(tree.Expression)(expression)); + + if ($(',')) { + while (e = $(this.expression)) { + value.push(e); + if (! 
$(',')) { break } + } + } + return new(tree.Value)(value); + }, + + // + // A Value is a comma-delimited list of Expressions + // + // font-family: Baskerville, Georgia, serif; + // + // In a Rule, a Value represents everything after the `:`, + // and before the `;`. + // + value: function () { + var e, expressions = [], important; + + while (e = $(this.expression)) { + expressions.push(e); + if (! $(',')) { break } + } + + if (expressions.length > 0) { + return new(tree.Value)(expressions); + } + }, + important: function () { + if (input.charAt(i) === '!') { + return $(/^! *important/); + } + }, + sub: function () { + var e; + + if ($('(') && (e = $(this.expression)) && $(')')) { + return e; + } + }, + multiplication: function () { + var m, a, op, operation; + if (m = $(this.operand)) { + while ((op = ($('/') || $('*'))) && (a = $(this.operand))) { + operation = new(tree.Operation)(op, [operation || m, a]); + } + return operation || m; + } + }, + addition: function () { + var m, a, op, operation; + if (m = $(this.multiplication)) { + while ((op = $(/^[-+]\s+/) || (input.charAt(i - 1) != ' ' && ($('+') || $('-')))) && + (a = $(this.multiplication))) { + operation = new(tree.Operation)(op, [operation || m, a]); + } + return operation || m; + } + }, + + // + // An operand is anything that can be part of an operation, + // such as a Color, or a Variable + // + operand: function () { + var negate, p = input.charAt(i + 1); + + if (input.charAt(i) === '-' && (p === '@' || p === '(')) { negate = $('-') } + var o = $(this.sub) || $(this.entities.dimension) || + $(this.entities.color) || $(this.entities.variable) || + $(this.entities.call); + return negate ? new(tree.Operation)('*', [new(tree.Dimension)(-1), o]) + : o; + }, + + // + // Expressions either represent mathematical operations, + // or white-space delimited Entities. 
+ // + // 1px solid black + // @var * 2 + // + expression: function () { + var e, delim, entities = [], d; + + while (e = $(this.addition) || $(this.entity)) { + entities.push(e); + } + if (entities.length > 0) { + return new(tree.Expression)(entities); + } + }, + property: function () { + var name; + + if (name = $(/^(\*?-?[-a-z_0-9]+)\s*:/)) { + return name[1]; + } + } + } + }; +}; + +if (typeof(window) !== 'undefined') { + // + // Used by `@import` directives + // + less.Parser.importer = function (path, paths, callback, env) { + if (path.charAt(0) !== '/' && paths.length > 0) { + path = paths[0] + path; + } + // We pass `true` as 3rd argument, to force the reload of the import. + // This is so we can get the syntax tree as opposed to just the CSS output, + // as we need this to evaluate the current stylesheet. + loadStyleSheet({ href: path, title: path, type: env.mime }, callback, true); + }; +} + +(function (tree) { + +tree.functions = { + rgb: function (r, g, b) { + return this.rgba(r, g, b, 1.0); + }, + rgba: function (r, g, b, a) { + var rgb = [r, g, b].map(function (c) { return number(c) }), + a = number(a); + return new(tree.Color)(rgb, a); + }, + hsl: function (h, s, l) { + return this.hsla(h, s, l, 1.0); + }, + hsla: function (h, s, l, a) { + h = (number(h) % 360) / 360; + s = number(s); l = number(l); a = number(a); + + var m2 = l <= 0.5 ? l * (s + 1) : l + s - l * s; + var m1 = l * 2 - m2; + + return this.rgba(hue(h + 1/3) * 255, + hue(h) * 255, + hue(h - 1/3) * 255, + a); + + function hue(h) { + h = h < 0 ? h + 1 : (h > 1 ? 
h - 1 : h); + if (h * 6 < 1) return m1 + (m2 - m1) * h * 6; + else if (h * 2 < 1) return m2; + else if (h * 3 < 2) return m1 + (m2 - m1) * (2/3 - h) * 6; + else return m1; + } + }, + hue: function (color) { + return new(tree.Dimension)(Math.round(color.toHSL().h)); + }, + saturation: function (color) { + return new(tree.Dimension)(Math.round(color.toHSL().s * 100), '%'); + }, + lightness: function (color) { + return new(tree.Dimension)(Math.round(color.toHSL().l * 100), '%'); + }, + alpha: function (color) { + return new(tree.Dimension)(color.toHSL().a); + }, + saturate: function (color, amount) { + var hsl = color.toHSL(); + + hsl.s += amount.value / 100; + hsl.s = clamp(hsl.s); + return hsla(hsl); + }, + desaturate: function (color, amount) { + var hsl = color.toHSL(); + + hsl.s -= amount.value / 100; + hsl.s = clamp(hsl.s); + return hsla(hsl); + }, + lighten: function (color, amount) { + var hsl = color.toHSL(); + + hsl.l += amount.value / 100; + hsl.l = clamp(hsl.l); + return hsla(hsl); + }, + darken: function (color, amount) { + var hsl = color.toHSL(); + + hsl.l -= amount.value / 100; + hsl.l = clamp(hsl.l); + return hsla(hsl); + }, + fadein: function (color, amount) { + var hsl = color.toHSL(); + + hsl.a += amount.value / 100; + hsl.a = clamp(hsl.a); + return hsla(hsl); + }, + fadeout: function (color, amount) { + var hsl = color.toHSL(); + + hsl.a -= amount.value / 100; + hsl.a = clamp(hsl.a); + return hsla(hsl); + }, + spin: function (color, amount) { + var hsl = color.toHSL(); + var hue = (hsl.h + amount.value) % 360; + + hsl.h = hue < 0 ? 360 + hue : hue; + + return hsla(hsl); + }, + // + // Copyright (c) 2006-2009 Hampton Catlin, Nathan Weizenbaum, and Chris Eppstein + // http://sass-lang.com + // + mix: function (color1, color2, weight) { + var p = weight.value / 100.0; + var w = p * 2 - 1; + var a = color1.toHSL().a - color2.toHSL().a; + + var w1 = (((w * a == -1) ? 
w : (w + a) / (1 + w * a)) + 1) / 2.0; + var w2 = 1 - w1; + + var rgb = [color1.rgb[0] * w1 + color2.rgb[0] * w2, + color1.rgb[1] * w1 + color2.rgb[1] * w2, + color1.rgb[2] * w1 + color2.rgb[2] * w2]; + + var alpha = color1.alpha * p + color2.alpha * (1 - p); + + return new(tree.Color)(rgb, alpha); + }, + greyscale: function (color) { + return this.desaturate(color, new(tree.Dimension)(100)); + }, + e: function (str) { + return new(tree.Anonymous)(str instanceof tree.JavaScript ? str.evaluated : str); + }, + escape: function (str) { + return new(tree.Anonymous)(encodeURI(str.value).replace(/=/g, "%3D").replace(/:/g, "%3A").replace(/#/g, "%23").replace(/;/g, "%3B").replace(/\(/g, "%28").replace(/\)/g, "%29")); + }, + '%': function (quoted /* arg, arg, ...*/) { + var args = Array.prototype.slice.call(arguments, 1), + str = quoted.value; + + for (var i = 0; i < args.length; i++) { + str = str.replace(/%[sda]/i, function(token) { + var value = token.match(/s/i) ? args[i].value : args[i].toCSS(); + return token.match(/[A-Z]$/) ? encodeURIComponent(value) : value; + }); + } + str = str.replace(/%%/g, '%'); + return new(tree.Quoted)('"' + str + '"', str); + }, + round: function (n) { + if (n instanceof tree.Dimension) { + return new(tree.Dimension)(Math.round(number(n)), n.unit); + } else if (typeof(n) === 'number') { + return Math.round(n); + } else { + throw { + error: "RuntimeError", + message: "math functions take numbers as parameters" + }; + } + } +}; + +function hsla(hsla) { + return tree.functions.hsla(hsla.h, hsla.s, hsla.l, hsla.a); +} + +function number(n) { + if (n instanceof tree.Dimension) { + return parseFloat(n.unit == '%' ? 
n.value / 100 : n.value); + } else if (typeof(n) === 'number') { + return n; + } else { + throw { + error: "RuntimeError", + message: "color functions take numbers as parameters" + }; + } +} + +function clamp(val) { + return Math.min(1, Math.max(0, val)); +} + +})(require('less/tree')); +(function (tree) { + +tree.Alpha = function (val) { + this.value = val; +}; +tree.Alpha.prototype = { + toCSS: function () { + return "alpha(opacity=" + + (this.value.toCSS ? this.value.toCSS() : this.value) + ")"; + }, + eval: function () { return this } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Anonymous = function (string) { + this.value = string.value || string; +}; +tree.Anonymous.prototype = { + toCSS: function () { + return this.value; + }, + eval: function () { return this } +}; + +})(require('less/tree')); +(function (tree) { + +// +// A function call node. +// +tree.Call = function (name, args) { + this.name = name; + this.args = args; +}; +tree.Call.prototype = { + // + // When evaluating a function call, + // we either find the function in `tree.functions` [1], + // in which case we call it, passing the evaluated arguments, + // or we simply print it out as it appeared originally [2]. + // + // The *functions.js* file contains the built-in functions. + // + // The reason why we evaluate the arguments, is in the case where + // we try to pass a variable to a function, like: `saturate(@color)`. + // The function should receive the value, not the variable. + // + eval: function (env) { + var args = this.args.map(function (a) { return a.eval(env) }); + + if (this.name in tree.functions) { // 1. + return tree.functions[this.name].apply(tree.functions, args); + } else { // 2. 
+ return new(tree.Anonymous)(this.name + + "(" + args.map(function (a) { return a.toCSS() }).join(', ') + ")"); + } + }, + + toCSS: function (env) { + return this.eval(env).toCSS(); + } +}; + +})(require('less/tree')); +(function (tree) { +// +// RGB Colors - #ff0014, #eee +// +tree.Color = function (rgb, a) { + // + // The end goal here, is to parse the arguments + // into an integer triplet, such as `128, 255, 0` + // + // This facilitates operations and conversions. + // + if (Array.isArray(rgb)) { + this.rgb = rgb; + } else if (rgb.length == 6) { + this.rgb = rgb.match(/.{2}/g).map(function (c) { + return parseInt(c, 16); + }); + } else if (rgb.length == 8) { + this.alpha = parseInt(rgb.substring(0,2), 16) / 255.0; + this.rgb = rgb.substr(2).match(/.{2}/g).map(function (c) { + return parseInt(c, 16); + }); + } else { + this.rgb = rgb.split('').map(function (c) { + return parseInt(c + c, 16); + }); + } + this.alpha = typeof(a) === 'number' ? a : 1; +}; +tree.Color.prototype = { + eval: function () { return this }, + + // + // If we have some transparency, the only way to represent it + // is via `rgba`. Otherwise, we use the hex representation, + // which has better compatibility with older browsers. + // Values are capped between `0` and `255`, rounded and zero-padded. + // + toCSS: function () { + if (this.alpha < 1.0) { + return "rgba(" + this.rgb.map(function (c) { + return Math.round(c); + }).concat(this.alpha).join(', ') + ")"; + } else { + return '#' + this.rgb.map(function (i) { + i = Math.round(i); + i = (i > 255 ? 255 : (i < 0 ? 0 : i)).toString(16); + return i.length === 1 ? '0' + i : i; + }).join(''); + } + }, + + // + // Operations have to be done per-channel, if not, + // channels will spill onto each other. Once we have + // our result, in the form of an integer triplet, + // we create a new Color node to hold the result. + // + operate: function (op, other) { + var result = []; + + if (! 
(other instanceof tree.Color)) { + other = other.toColor(); + } + + for (var c = 0; c < 3; c++) { + result[c] = tree.operate(op, this.rgb[c], other.rgb[c]); + } + return new(tree.Color)(result, this.alpha + other.alpha); + }, + + toHSL: function () { + var r = this.rgb[0] / 255, + g = this.rgb[1] / 255, + b = this.rgb[2] / 255, + a = this.alpha; + + var max = Math.max(r, g, b), min = Math.min(r, g, b); + var h, s, l = (max + min) / 2, d = max - min; + + if (max === min) { + h = s = 0; + } else { + s = l > 0.5 ? d / (2 - max - min) : d / (max + min); + + switch (max) { + case r: h = (g - b) / d + (g < b ? 6 : 0); break; + case g: h = (b - r) / d + 2; break; + case b: h = (r - g) / d + 4; break; + } + h /= 6; + } + return { h: h * 360, s: s, l: l, a: a }; + } +}; + + +})(require('less/tree')); +(function (tree) { + +tree.Comment = function (value, silent) { + this.value = value; + this.silent = !!silent; +}; +tree.Comment.prototype = { + toCSS: function (env) { + return env.compress ? '' : this.value; + }, + eval: function () { return this } +}; + +})(require('less/tree')); +(function (tree) { + +// +// A number with a unit +// +tree.Dimension = function (value, unit) { + this.value = parseFloat(value); + this.unit = unit || null; +}; + +tree.Dimension.prototype = { + eval: function () { return this }, + toColor: function () { + return new(tree.Color)([this.value, this.value, this.value]); + }, + toCSS: function () { + var css = this.value + this.unit; + return css; + }, + + // In an operation between two Dimensions, + // we default to the first Dimension's unit, + // so `1px + 2em` will yield `3px`. + // In the future, we could implement some unit + // conversions such that `100cm + 10mm` would yield + // `101cm`. 
+ operate: function (op, other) { + return new(tree.Dimension) + (tree.operate(op, this.value, other.value), + this.unit || other.unit); + } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Directive = function (name, value) { + this.name = name; + if (Array.isArray(value)) { + this.ruleset = new(tree.Ruleset)([], value); + } else { + this.value = value; + } +}; +tree.Directive.prototype = { + toCSS: function (ctx, env) { + if (this.ruleset) { + this.ruleset.root = true; + return this.name + (env.compress ? '{' : ' {\n ') + + this.ruleset.toCSS(ctx, env).trim().replace(/\n/g, '\n ') + + (env.compress ? '}': '\n}\n'); + } else { + return this.name + ' ' + this.value.toCSS() + ';\n'; + } + }, + eval: function (env) { + env.frames.unshift(this); + this.ruleset = this.ruleset && this.ruleset.eval(env); + env.frames.shift(); + return this; + }, + variable: function (name) { return tree.Ruleset.prototype.variable.call(this.ruleset, name) }, + find: function () { return tree.Ruleset.prototype.find.apply(this.ruleset, arguments) }, + rulesets: function () { return tree.Ruleset.prototype.rulesets.apply(this.ruleset) } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Element = function (combinator, value) { + this.combinator = combinator instanceof tree.Combinator ? + combinator : new(tree.Combinator)(combinator); + this.value = value.trim(); +}; +tree.Element.prototype.toCSS = function (env) { + return this.combinator.toCSS(env || {}) + this.value; +}; + +tree.Combinator = function (value) { + if (value === ' ') { + this.value = ' '; + } else { + this.value = value ? value.trim() : ""; + } +}; +tree.Combinator.prototype.toCSS = function (env) { + return { + '' : '', + ' ' : ' ', + '&' : '', + ':' : ' :', + '::': '::', + '+' : env.compress ? '+' : ' + ', + '~' : env.compress ? '~' : ' ~ ', + '>' : env.compress ? 
'>' : ' > ' + }[this.value]; +}; + +})(require('less/tree')); +(function (tree) { + +tree.Expression = function (value) { this.value = value }; +tree.Expression.prototype = { + eval: function (env) { + if (this.value.length > 1) { + return new(tree.Expression)(this.value.map(function (e) { + return e.eval(env); + })); + } else { + return this.value[0].eval(env); + } + }, + toCSS: function (env) { + return this.value.map(function (e) { + return e.toCSS(env); + }).join(' '); + } +}; + +})(require('less/tree')); +(function (tree) { +// +// CSS @import node +// +// The general strategy here is that we don't want to wait +// for the parsing to be completed, before we start importing +// the file. That's because in the context of a browser, +// most of the time will be spent waiting for the server to respond. +// +// On creation, we push the import path to our import queue, though +// `import,push`, we also pass it a callback, which it'll call once +// the file has been fetched, and parsed. +// +tree.Import = function (path, imports) { + var that = this; + + this._path = path; + + // The '.less' extension is optional + if (path instanceof tree.Quoted) { + this.path = /\.(le?|c)ss$/.test(path.value) ? path.value : path.value + '.less'; + } else { + this.path = path.value.value || path.value; + } + + this.css = /css$/.test(this.path); + + // Only pre-compile .less files + if (! this.css) { + imports.push(this.path, function (root) { + if (! root) { + throw new(Error)("Error parsing " + that.path); + } + that.root = root; + }); + } +}; + +// +// The actual import node doesn't return anything, when converted to CSS. +// The reason is that it's used at the evaluation stage, so that the rules +// it imports can be treated like any other rules. +// +// In `eval`, we make sure all Import nodes get evaluated, recursively, so +// we end up with a flat structure, which can easily be imported in the parent +// ruleset. 
+// +tree.Import.prototype = { + toCSS: function () { + if (this.css) { + return "@import " + this._path.toCSS() + ';\n'; + } else { + return ""; + } + }, + eval: function (env) { + var ruleset; + + if (this.css) { + return this; + } else { + ruleset = new(tree.Ruleset)(null, this.root.rules.slice(0)); + + for (var i = 0; i < ruleset.rules.length; i++) { + if (ruleset.rules[i] instanceof tree.Import) { + Array.prototype + .splice + .apply(ruleset.rules, + [i, 1].concat(ruleset.rules[i].eval(env))); + } + } + return ruleset.rules; + } + } +}; + +})(require('less/tree')); +(function (tree) { + +tree.JavaScript = function (string, index, escaped) { + this.escaped = escaped; + this.expression = string; + this.index = index; +}; +tree.JavaScript.prototype = { + toCSS: function () { + if (this.escaped) { + return this.evaluated; + } else { + return JSON.stringify(this.evaluated); + } + }, + eval: function (env) { + var result, + context = {}; + + var expression = this.expression.replace(/@\{([\w-]+)\}/g, function (_, name) { + return new(tree.Variable)('@' + name).eval(env).value; + }); + + expression = new(Function)('return (' + expression + ')'); + + for (var k in env.frames[0].variables()) { + context[k.slice(1)] = { + value: env.frames[0].variables()[k].value, + toJS: function () { + return this.value.eval(env).toCSS(); + } + }; + } + + try { + this.evaluated = expression.call(context); + } catch (e) { + throw { message: "JavaScript evaluation error: '" + e.name + ': ' + e.message + "'" , + index: this.index }; + } + return this; + } +}; + +})(require('less/tree')); + +(function (tree) { + +tree.Keyword = function (value) { this.value = value }; +tree.Keyword.prototype = { + eval: function () { return this }, + toCSS: function () { return this.value } +}; + +})(require('less/tree')); +(function (tree) { + +tree.mixin = {}; +tree.mixin.Call = function (elements, args, index) { + this.selector = new(tree.Selector)(elements); + this.arguments = args; + this.index = 
index; +}; +tree.mixin.Call.prototype = { + eval: function (env) { + var mixins, rules = [], match = false; + + for (var i = 0; i < env.frames.length; i++) { + if ((mixins = env.frames[i].find(this.selector)).length > 0) { + for (var m = 0; m < mixins.length; m++) { + if (mixins[m].match(this.arguments, env)) { + try { + Array.prototype.push.apply( + rules, mixins[m].eval(env, this.arguments).rules); + match = true; + } catch (e) { + throw { message: e.message, index: e.index, stack: e.stack, call: this.index }; + } + } + } + if (match) { + return rules; + } else { + throw { message: 'No matching definition was found for `' + + this.selector.toCSS().trim() + '(' + + this.arguments.map(function (a) { + return a.toCSS(); + }).join(', ') + ")`", + index: this.index }; + } + } + } + throw { message: this.selector.toCSS().trim() + " is undefined", + index: this.index }; + } +}; + +tree.mixin.Definition = function (name, params, rules) { + this.name = name; + this.selectors = [new(tree.Selector)([new(tree.Element)(null, name)])]; + this.params = params; + this.arity = params.length; + this.rules = rules; + this._lookups = {}; + this.required = params.reduce(function (count, p) { + if (!p.name || (p.name && !p.value)) { return count + 1 } + else { return count } + }, 0); + this.parent = tree.Ruleset.prototype; + this.frames = []; +}; +tree.mixin.Definition.prototype = { + toCSS: function () { return "" }, + variable: function (name) { return this.parent.variable.call(this, name) }, + variables: function () { return this.parent.variables.call(this) }, + find: function () { return this.parent.find.apply(this, arguments) }, + rulesets: function () { return this.parent.rulesets.apply(this) }, + + eval: function (env, args) { + var frame = new(tree.Ruleset)(null, []), context, _arguments = []; + + for (var i = 0, val; i < this.params.length; i++) { + if (this.params[i].name) { + if (val = (args && args[i]) || this.params[i].value) { + 
frame.rules.unshift(new(tree.Rule)(this.params[i].name, val.eval(env))); + } else { + throw { message: "wrong number of arguments for " + this.name + + ' (' + args.length + ' for ' + this.arity + ')' }; + } + } + } + for (var i = 0; i < Math.max(this.params.length, args && args.length); i++) { + _arguments.push(args[i] || this.params[i].value); + } + frame.rules.unshift(new(tree.Rule)('@arguments', new(tree.Expression)(_arguments))); + + return new(tree.Ruleset)(null, this.rules.slice(0)).eval({ + frames: [this, frame].concat(this.frames, env.frames) + }); + }, + match: function (args, env) { + var argsLength = (args && args.length) || 0, len; + + if (argsLength < this.required) { return false } + if ((this.required > 0) && (argsLength > this.params.length)) { return false } + + len = Math.min(argsLength, this.arity); + + for (var i = 0; i < len; i++) { + if (!this.params[i].name) { + if (args[i].eval(env).toCSS() != this.params[i].value.eval(env).toCSS()) { + return false; + } + } + } + return true; + } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Operation = function (op, operands) { + this.op = op.trim(); + this.operands = operands; +}; +tree.Operation.prototype.eval = function (env) { + var a = this.operands[0].eval(env), + b = this.operands[1].eval(env), + temp; + + if (a instanceof tree.Dimension && b instanceof tree.Color) { + if (this.op === '*' || this.op === '+') { + temp = b, b = a, a = temp; + } else { + throw { name: "OperationError", + message: "Can't substract or divide a color from a number" }; + } + } + return a.operate(this.op, b); +}; + +tree.operate = function (op, a, b) { + switch (op) { + case '+': return a + b; + case '-': return a - b; + case '*': return a * b; + case '/': return a / b; + } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Quoted = function (str, content, escaped, i) { + this.escaped = escaped; + this.value = content || ''; + this.quote = str.charAt(0); + this.index = i; +}; +tree.Quoted.prototype 
= { + toCSS: function () { + if (this.escaped) { + return this.value; + } else { + return this.quote + this.value + this.quote; + } + }, + eval: function (env) { + this.value = this.value.replace(/@\{([\w-]+)\}/g, function (_, name) { + return new(tree.Variable)('@' + name).eval(env).value; + }).replace(/`([^`]+)`/g, function (_, exp) { + return new(tree.JavaScript)(exp, this.index, true).eval(env).toCSS(); + }); + return this; + } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Rule = function (name, value, important, index) { + this.name = name; + this.value = (value instanceof tree.Value) ? value : new(tree.Value)([value]); + this.important = important ? ' ' + important.trim() : ''; + this.index = index; + + if (name.charAt(0) === '@') { + this.variable = true; + } else { this.variable = false } +}; +tree.Rule.prototype.toCSS = function (env) { + if (this.variable) { return "" } + else { + return this.name + (env.compress ? ':' : ': ') + + this.value.toCSS(env) + + this.important + ";"; + } +}; + +tree.Rule.prototype.eval = function (context) { + return new(tree.Rule)(this.name, this.value.eval(context), this.important, this.index); +}; + +tree.Shorthand = function (a, b) { + this.a = a; + this.b = b; +}; + +tree.Shorthand.prototype = { + toCSS: function (env) { + return this.a.toCSS(env) + "/" + this.b.toCSS(env); + }, + eval: function () { return this } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Ruleset = function (selectors, rules) { + this.selectors = selectors; + this.rules = rules; + this._lookups = {}; +}; +tree.Ruleset.prototype = { + eval: function (env) { + var ruleset = new(tree.Ruleset)(this.selectors, this.rules.slice(0)); + + ruleset.root = this.root; + + // push the current ruleset to the frames stack + env.frames.unshift(ruleset); + + // Evaluate imports + if (ruleset.root) { + for (var i = 0; i < ruleset.rules.length; i++) { + if (ruleset.rules[i] instanceof tree.Import) { + Array.prototype.splice + 
.apply(ruleset.rules, [i, 1].concat(ruleset.rules[i].eval(env))); + } + } + } + + // Store the frames around mixin definitions, + // so they can be evaluated like closures when the time comes. + for (var i = 0; i < ruleset.rules.length; i++) { + if (ruleset.rules[i] instanceof tree.mixin.Definition) { + ruleset.rules[i].frames = env.frames.slice(0); + } + } + + // Evaluate mixin calls. + for (var i = 0; i < ruleset.rules.length; i++) { + if (ruleset.rules[i] instanceof tree.mixin.Call) { + Array.prototype.splice + .apply(ruleset.rules, [i, 1].concat(ruleset.rules[i].eval(env))); + } + } + + // Evaluate everything else + for (var i = 0, rule; i < ruleset.rules.length; i++) { + rule = ruleset.rules[i]; + + if (! (rule instanceof tree.mixin.Definition)) { + ruleset.rules[i] = rule.eval ? rule.eval(env) : rule; + } + } + + // Pop the stack + env.frames.shift(); + + return ruleset; + }, + match: function (args) { + return !args || args.length === 0; + }, + variables: function () { + if (this._variables) { return this._variables } + else { + return this._variables = this.rules.reduce(function (hash, r) { + if (r instanceof tree.Rule && r.variable === true) { + hash[r.name] = r; + } + return hash; + }, {}); + } + }, + variable: function (name) { + return this.variables()[name]; + }, + rulesets: function () { + if (this._rulesets) { return this._rulesets } + else { + return this._rulesets = this.rules.filter(function (r) { + return (r instanceof tree.Ruleset) || (r instanceof tree.mixin.Definition); + }); + } + }, + find: function (selector, self) { + self = self || this; + var rules = [], rule, match, + key = selector.toCSS(); + + if (key in this._lookups) { return this._lookups[key] } + + this.rulesets().forEach(function (rule) { + if (rule !== self) { + for (var j = 0; j < rule.selectors.length; j++) { + if (match = selector.match(rule.selectors[j])) { + if (selector.elements.length > 1) { + Array.prototype.push.apply(rules, rule.find( + 
new(tree.Selector)(selector.elements.slice(1)), self)); + } else { + rules.push(rule); + } + break; + } + } + } + }); + return this._lookups[key] = rules; + }, + // + // Entry point for code generation + // + // `context` holds an array of arrays. + // + toCSS: function (context, env) { + var css = [], // The CSS output + rules = [], // node.Rule instances + rulesets = [], // node.Ruleset instances + paths = [], // Current selectors + selector, // The fully rendered selector + rule; + + if (! this.root) { + if (context.length === 0) { + paths = this.selectors.map(function (s) { return [s] }); + } else { + for (var s = 0; s < this.selectors.length; s++) { + for (var c = 0; c < context.length; c++) { + paths.push(context[c].concat([this.selectors[s]])); + } + } + } + } + + // Compile rules and rulesets + for (var i = 0; i < this.rules.length; i++) { + rule = this.rules[i]; + + if (rule.rules || (rule instanceof tree.Directive)) { + rulesets.push(rule.toCSS(paths, env)); + } else if (rule instanceof tree.Comment) { + if (!rule.silent) { + if (this.root) { + rulesets.push(rule.toCSS(env)); + } else { + rules.push(rule.toCSS(env)); + } + } + } else { + if (rule.toCSS && !rule.variable) { + rules.push(rule.toCSS(env)); + } else if (rule.value && !rule.variable) { + rules.push(rule.value.toString()); + } + } + } + + rulesets = rulesets.join(''); + + // If this is the root node, we don't render + // a selector, or {}. + // Otherwise, only output if this ruleset has rules. + if (this.root) { + css.push(rules.join(env.compress ? '' : '\n')); + } else { + if (rules.length > 0) { + selector = paths.map(function (p) { + return p.map(function (s) { + return s.toCSS(env); + }).join('').trim(); + }).join(env.compress ? ',' : (paths.length > 3 ? ',\n' : ', ')); + css.push(selector, + (env.compress ? '{' : ' {\n ') + + rules.join(env.compress ? '' : '\n ') + + (env.compress ? '}' : '\n}\n')); + } + } + css.push(rulesets); + + return css.join('') + (env.compress ? 
'\n' : ''); + } +}; +})(require('less/tree')); +(function (tree) { + +tree.Selector = function (elements) { + this.elements = elements; + if (this.elements[0].combinator.value === "") { + this.elements[0].combinator.value = ' '; + } +}; +tree.Selector.prototype.match = function (other) { + if (this.elements[0].value === other.elements[0].value) { + return true; + } else { + return false; + } +}; +tree.Selector.prototype.toCSS = function (env) { + if (this._css) { return this._css } + + return this._css = this.elements.map(function (e) { + if (typeof(e) === 'string') { + return ' ' + e.trim(); + } else { + return e.toCSS(env); + } + }).join(''); +}; + +})(require('less/tree')); +(function (tree) { + +tree.URL = function (val, paths) { + if (val.data) { + this.attrs = val; + } else { + // Add the base path if the URL is relative and we are in the browser + if (!/^(?:https?:\/|file:\/|data:\/)?\//.test(val.value) && paths.length > 0 && typeof(window) !== 'undefined') { + val.value = paths[0] + (val.value.charAt(0) === '/' ? val.value.slice(1) : val.value); + } + this.value = val; + this.paths = paths; + } +}; +tree.URL.prototype = { + toCSS: function () { + return "url(" + (this.attrs ? 'data:' + this.attrs.mime + this.attrs.charset + this.attrs.base64 + this.attrs.data + : this.value.toCSS()) + ")"; + }, + eval: function (ctx) { + return this.attrs ? this : new(tree.URL)(this.value.eval(ctx), this.paths); + } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Value = function (value) { + this.value = value; + this.is = 'value'; +}; +tree.Value.prototype = { + eval: function (env) { + if (this.value.length === 1) { + return this.value[0].eval(env); + } else { + return new(tree.Value)(this.value.map(function (v) { + return v.eval(env); + })); + } + }, + toCSS: function (env) { + return this.value.map(function (e) { + return e.toCSS(env); + }).join(env.compress ? 
',' : ', '); + } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Variable = function (name, index) { this.name = name, this.index = index }; +tree.Variable.prototype = { + eval: function (env) { + var variable, v, name = this.name; + + if (name.indexOf('@@') == 0) { + name = '@' + new(tree.Variable)(name.slice(1)).eval(env).value; + } + + if (variable = tree.find(env.frames, function (frame) { + if (v = frame.variable(name)) { + return v.value.eval(env); + } + })) { return variable } + else { + throw { message: "variable " + name + " is undefined", + index: this.index }; + } + } +}; + +})(require('less/tree')); +require('less/tree').find = function (obj, fun) { + for (var i = 0, r; i < obj.length; i++) { + if (r = fun.call(obj, obj[i])) { return r } + } + return null; +}; +// +// browser.js - client-side engine +// + +var isFileProtocol = (location.protocol === 'file:' || + location.protocol === 'chrome:' || + location.protocol === 'chrome-extension:' || + location.protocol === 'resource:'); + +less.env = less.env || (location.hostname == '127.0.0.1' || + location.hostname == '0.0.0.0' || + location.hostname == 'localhost' || + location.port.length > 0 || + isFileProtocol ? 'development' + : 'production'); + +// Load styles asynchronously (default: false) +// +// This is set to `false` by default, so that the body +// doesn't start loading before the stylesheets are parsed. +// Setting this to `true` can result in flickering. +// +less.async = false; + +// Interval between watch polls +less.poll = less.poll || (isFileProtocol ? 
1000 : 1500); + +// +// Watch mode +// +less.watch = function () { return this.watchMode = true }; +less.unwatch = function () { return this.watchMode = false }; + +if (less.env === 'development') { + less.optimization = 0; + + if (/!watch/.test(location.hash)) { + less.watch(); + } + less.watchTimer = setInterval(function () { + if (less.watchMode) { + loadStyleSheets(function (root, sheet, env) { + if (root) { + createCSS(root.toCSS(), sheet, env.lastModified); + } + }); + } + }, less.poll); +} else { + less.optimization = 3; +} + +var cache; + +try { + cache = (typeof(window.localStorage) === 'undefined') ? null : window.localStorage; +} catch (_) { + cache = null; +} + +// +// Get all tags with the 'rel' attribute set to "stylesheet/less" +// +var links = document.getElementsByTagName('link'); +var typePattern = /^text\/(x-)?less$/; + +less.sheets = []; + +for (var i = 0; i < links.length; i++) { + if (links[i].rel === 'stylesheet/less' || (links[i].rel.match(/stylesheet/) && + (links[i].type.match(typePattern)))) { + less.sheets.push(links[i]); + } +} + + +less.refresh = function (reload) { + var startTime, endTime; + startTime = endTime = new(Date); + + loadStyleSheets(function (root, sheet, env) { + if (env.local) { + log("loading " + sheet.href + " from cache."); + } else { + log("parsed " + sheet.href + " successfully."); + createCSS(root.toCSS(), sheet, env.lastModified); + } + log("css for " + sheet.href + " generated in " + (new(Date) - endTime) + 'ms'); + (env.remaining === 0) && log("css generated in " + (new(Date) - startTime) + 'ms'); + endTime = new(Date); + }, reload); + + loadStyles(); +}; +less.refreshStyles = loadStyles; + +less.refresh(less.env === 'development'); + +function loadStyles() { + var styles = document.getElementsByTagName('style'); + for (var i = 0; i < styles.length; i++) { + if (styles[i].type.match(typePattern)) { + new(less.Parser)().parse(styles[i].innerHTML || '', function (e, tree) { + styles[i].type = 'text/css'; + 
styles[i].innerHTML = tree.toCSS(); + }); + } + } +} + +function loadStyleSheets(callback, reload) { + for (var i = 0; i < less.sheets.length; i++) { + loadStyleSheet(less.sheets[i], callback, reload, less.sheets.length - (i + 1)); + } +} + +function loadStyleSheet(sheet, callback, reload, remaining) { + var url = window.location.href.replace(/[#?].*$/, ''); + var href = sheet.href.replace(/\?.*$/, ''); + var css = cache && cache.getItem(href); + var timestamp = cache && cache.getItem(href + ':timestamp'); + var styles = { css: css, timestamp: timestamp }; + + // Stylesheets in IE don't always return the full path + if (! /^(https?|file):/.test(href)) { + if (href.charAt(0) == "/") { + href = window.location.protocol + "//" + window.location.host + href; + } else { + href = url.slice(0, url.lastIndexOf('/') + 1) + href; + } + } + + xhr(sheet.href, sheet.type, function (data, lastModified) { + if (!reload && styles && lastModified && + (new(Date)(lastModified).valueOf() === + new(Date)(styles.timestamp).valueOf())) { + // Use local copy + createCSS(styles.css, sheet); + callback(null, sheet, { local: true, remaining: remaining }); + } else { + // Use remote copy (re-parse) + try { + new(less.Parser)({ + optimization: less.optimization, + paths: [href.replace(/[\w\.-]+$/, '')], + mime: sheet.type + }).parse(data, function (e, root) { + if (e) { return error(e, href) } + try { + callback(root, sheet, { local: false, lastModified: lastModified, remaining: remaining }); + removeNode(document.getElementById('less-error-message:' + extractId(href))); + } catch (e) { + error(e, href); + } + }); + } catch (e) { + error(e, href); + } + } + }, function (status, url) { + throw new(Error)("Couldn't load " + url + " (" + status + ")"); + }); +} + +function extractId(href) { + return href.replace(/^[a-z]+:\/\/?[^\/]+/, '' ) // Remove protocol & domain + .replace(/^\//, '' ) // Remove root / + .replace(/\?.*$/, '' ) // Remove query + .replace(/\.[^\.\/]+$/, '' ) // Remove file 
extension + .replace(/[^\.\w-]+/g, '-') // Replace illegal characters + .replace(/\./g, ':'); // Replace dots with colons(for valid id) +} + +function createCSS(styles, sheet, lastModified) { + var css; + + // Strip the query-string + var href = sheet.href ? sheet.href.replace(/\?.*$/, '') : ''; + + // If there is no title set, use the filename, minus the extension + var id = 'less:' + (sheet.title || extractId(href)); + + // If the stylesheet doesn't exist, create a new node + if ((css = document.getElementById(id)) === null) { + css = document.createElement('style'); + css.type = 'text/css'; + css.media = sheet.media || 'screen'; + css.id = id; + document.getElementsByTagName('head')[0].appendChild(css); + } + + if (css.styleSheet) { // IE + try { + css.styleSheet.cssText = styles; + } catch (e) { + throw new(Error)("Couldn't reassign styleSheet.cssText."); + } + } else { + (function (node) { + if (css.childNodes.length > 0) { + if (css.firstChild.nodeValue !== node.nodeValue) { + css.replaceChild(node, css.firstChild); + } + } else { + css.appendChild(node); + } + })(document.createTextNode(styles)); + } + + // Don't update the local store if the file wasn't modified + if (lastModified && cache) { + log('saving ' + href + ' to cache.'); + cache.setItem(href, styles); + cache.setItem(href + ':timestamp', lastModified); + } +} + +function xhr(url, type, callback, errback) { + var xhr = getXMLHttpRequest(); + var async = isFileProtocol ? 
false : less.async; + + if (typeof(xhr.overrideMimeType) === 'function') { + xhr.overrideMimeType('text/css'); + } + xhr.open('GET', url, async); + xhr.setRequestHeader('Accept', type || 'text/x-less, text/css; q=0.9, */*; q=0.5'); + xhr.send(null); + + if (isFileProtocol) { + if (xhr.status === 0) { + callback(xhr.responseText); + } else { + errback(xhr.status, url); + } + } else if (async) { + xhr.onreadystatechange = function () { + if (xhr.readyState == 4) { + handleResponse(xhr, callback, errback); + } + }; + } else { + handleResponse(xhr, callback, errback); + } + + function handleResponse(xhr, callback, errback) { + if (xhr.status >= 200 && xhr.status < 300) { + callback(xhr.responseText, + xhr.getResponseHeader("Last-Modified")); + } else if (typeof(errback) === 'function') { + errback(xhr.status, url); + } + } +} + +function getXMLHttpRequest() { + if (window.XMLHttpRequest) { + return new(XMLHttpRequest); + } else { + try { + return new(ActiveXObject)("MSXML2.XMLHTTP.3.0"); + } catch (e) { + log("browser doesn't support AJAX."); + return null; + } + } +} + +function removeNode(node) { + return node && node.parentNode.removeChild(node); +} + +function log(str) { + if (less.env == 'development' && typeof(console) !== "undefined") { console.log('less: ' + str) } +} + +function error(e, href) { + var id = 'less-error-message:' + extractId(href); + + var template = ['
      ', + '
    • {0}
    • ', + '
    • {current}
    • ', + '
    • {2}
    • ', + '
    '].join('\n'); + + var elem = document.createElement('div'), timer, content; + + elem.id = id; + elem.className = "less-error-message"; + + content = '

    ' + (e.message || 'There is an error in your .less file') + + '

    ' + '

    ' + href + " "; + + if (e.extract) { + content += 'on line ' + e.line + ', column ' + (e.column + 1) + ':

    ' + + template.replace(/\[(-?\d)\]/g, function (_, i) { + return (parseInt(e.line) + parseInt(i)) || ''; + }).replace(/\{(\d)\}/g, function (_, i) { + return e.extract[parseInt(i)] || ''; + }).replace(/\{current\}/, e.extract[1].slice(0, e.column) + '' + + e.extract[1].slice(e.column) + ''); + } + elem.innerHTML = content; + + // CSS for error messages + createCSS([ + '.less-error-message ul, .less-error-message li {', + 'list-style-type: none;', + 'margin-right: 15px;', + 'padding: 4px 0;', + 'margin: 0;', + '}', + '.less-error-message label {', + 'font-size: 12px;', + 'margin-right: 15px;', + 'padding: 4px 0;', + 'color: #cc7777;', + '}', + '.less-error-message pre {', + 'color: #ee4444;', + 'padding: 4px 0;', + 'margin: 0;', + 'display: inline-block;', + '}', + '.less-error-message pre.ctx {', + 'color: #dd4444;', + '}', + '.less-error-message h3 {', + 'font-size: 20px;', + 'font-weight: bold;', + 'padding: 15px 0 5px 0;', + 'margin: 0;', + '}', + '.less-error-message a {', + 'color: #10a', + '}', + '.less-error-message .error {', + 'color: red;', + 'font-weight: bold;', + 'padding-bottom: 2px;', + 'border-bottom: 1px dashed red;', + '}' + ].join('\n'), { title: 'error-message' }); + + elem.style.cssText = [ + "font-family: Arial, sans-serif", + "border: 1px solid #e00", + "background-color: #eee", + "border-radius: 5px", + "-webkit-border-radius: 5px", + "-moz-border-radius: 5px", + "color: #e00", + "padding: 15px", + "margin-bottom: 15px" + ].join(';'); + + if (less.env == 'development') { + timer = setInterval(function () { + if (document.body) { + if (document.getElementById(id)) { + document.body.replaceChild(elem, document.getElementById(id)); + } else { + document.body.insertBefore(elem, document.body.firstChild); + } + clearInterval(timer); + } + }, 10); + } +} + +})(window); diff --git a/src/dashboard/src/media/vendor/less.js/dist/less-1.1.0.min.js b/src/dashboard/src/media/vendor/less.js/dist/less-1.1.0.min.js new file mode 100644 index 
0000000000..ede454e103 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/dist/less-1.1.0.min.js @@ -0,0 +1,16 @@ +// +// LESS - Leaner CSS v1.1.0 +// http://lesscss.org +// +// Copyright (c) 2009-2011, Alexis Sellier +// Licensed under the Apache 2.0 License. +// +// +// LESS - Leaner CSS v1.1.0 +// http://lesscss.org +// +// Copyright (c) 2009-2011, Alexis Sellier +// Licensed under the Apache 2.0 License. +// +(function(a,b){function v(a,b){var c="less-error-message:"+p(b),e=["
      ",'
    • {0}
    • ',"
    • {current}
    • ",'
    • {2}
    • ',"
    "].join("\n"),f=document.createElement("div"),g,h;f.id=c,f.className="less-error-message",h="

    "+(a.message||"There is an error in your .less file")+"

    "+'

    '+b+" ",a.extract&&(h+="on line "+a.line+", column "+(a.column+1)+":

    "+e.replace(/\[(-?\d)\]/g,function(b,c){return parseInt(a.line)+parseInt(c)||""}).replace(/\{(\d)\}/g,function(b,c){return a.extract[parseInt(c)]||""}).replace(/\{current\}/,a.extract[1].slice(0,a.column)+''+a.extract[1].slice(a.column)+"")),f.innerHTML=h,q([".less-error-message ul, .less-error-message li {","list-style-type: none;","margin-right: 15px;","padding: 4px 0;","margin: 0;","}",".less-error-message label {","font-size: 12px;","margin-right: 15px;","padding: 4px 0;","color: #cc7777;","}",".less-error-message pre {","color: #ee4444;","padding: 4px 0;","margin: 0;","display: inline-block;","}",".less-error-message pre.ctx {","color: #dd4444;","}",".less-error-message h3 {","font-size: 20px;","font-weight: bold;","padding: 15px 0 5px 0;","margin: 0;","}",".less-error-message a {","color: #10a","}",".less-error-message .error {","color: red;","font-weight: bold;","padding-bottom: 2px;","border-bottom: 1px dashed red;","}"].join("\n"),{title:"error-message"}),f.style.cssText=["font-family: Arial, sans-serif","border: 1px solid #e00","background-color: #eee","border-radius: 5px","-webkit-border-radius: 5px","-moz-border-radius: 5px","color: #e00","padding: 15px","margin-bottom: 15px"].join(";"),d.env=="development"&&(g=setInterval(function(){document.body&&(document.getElementById(c)?document.body.replaceChild(f,document.getElementById(c)):document.body.insertBefore(f,document.body.firstChild),clearInterval(g))},10))}function u(a){d.env=="development"&&typeof console!="undefined"&&console.log("less: "+a)}function t(a){return a&&a.parentNode.removeChild(a)}function s(){if(a.XMLHttpRequest)return new XMLHttpRequest;try{return new ActiveXObject("MSXML2.XMLHTTP.3.0")}catch(b){u("browser doesn't support AJAX.");return null}}function r(a,b,c,e){function i(b,c,d){b.status>=200&&b.status<300?c(b.responseText,b.getResponseHeader("Last-Modified")):typeof d=="function"&&d(b.status,a)}var f=s(),h=g?!1:d.async;typeof 
f.overrideMimeType=="function"&&f.overrideMimeType("text/css"),f.open("GET",a,h),f.setRequestHeader("Accept",b||"text/x-less, text/css; q=0.9, */*; q=0.5"),f.send(null),g?f.status===0?c(f.responseText):e(f.status,a):h?f.onreadystatechange=function(){f.readyState==4&&i(f,c,e)}:i(f,c,e)}function q(a,b,c){var d,e=b.href?b.href.replace(/\?.*$/,""):"",f="less:"+(b.title||p(e));(d=document.getElementById(f))===null&&(d=document.createElement("style"),d.type="text/css",d.media=b.media||"screen",d.id=f,document.getElementsByTagName("head")[0].appendChild(d));if(d.styleSheet)try{d.styleSheet.cssText=a}catch(g){throw new Error("Couldn't reassign styleSheet.cssText.")}else(function(a){d.childNodes.length>0?d.firstChild.nodeValue!==a.nodeValue&&d.replaceChild(a,d.firstChild):d.appendChild(a)})(document.createTextNode(a));c&&h&&(u("saving "+e+" to cache."),h.setItem(e,a),h.setItem(e+":timestamp",c))}function p(a){return a.replace(/^[a-z]+:\/\/?[^\/]+/,"").replace(/^\//,"").replace(/\?.*$/,"").replace(/\.[^\.\/]+$/,"").replace(/[^\.\w-]+/g,"-").replace(/\./g,":")}function o(b,c,e,f){var g=a.location.href.replace(/[#?].*$/,""),i=b.href.replace(/\?.*$/,""),j=h&&h.getItem(i),k=h&&h.getItem(i+":timestamp"),l={css:j,timestamp:k};/^(https?|file):/.test(i)||(i.charAt(0)=="/"?i=a.location.protocol+"//"+a.location.host+i:i=g.slice(0,g.lastIndexOf("/")+1)+i),r(b.href,b.type,function(a,g){if(!e&&l&&g&&(new Date(g)).valueOf()===(new Date(l.timestamp)).valueOf())q(l.css,b),c(null,b,{local:!0,remaining:f});else try{(new d.Parser({optimization:d.optimization,paths:[i.replace(/[\w\.-]+$/,"")],mime:b.type})).parse(a,function(a,d){if(a)return v(a,i);try{c(d,b,{local:!1,lastModified:g,remaining:f}),t(document.getElementById("less-error-message:"+p(i)))}catch(a){v(a,i)}})}catch(h){v(h,i)}},function(a,b){throw new Error("Couldn't load "+b+" ("+a+")")})}function n(a,b){for(var c=0;c>>0;for(var d=0;d>>0,c=Array(b),d=arguments[1];for(var e=0;e>>0,c=0;if(b===0&&arguments.length===1)throw new 
TypeError;if(arguments.length>=2)var d=arguments[1];else for(;;){if(c in this){d=this[c++];break}if(++c>=b)throw new TypeError}for(;c=b)return-1;c<0&&(c+=b);for(;ck&&(j[f]=j[f].slice(c-k),k=c)}function q(){j[f]=g,c=h,k=c}function p(){g=j[f],h=c,k=c}var b,c,f,g,h,i,j,k,l,m=this,n=function(){},o=this.imports={paths:a&&a.paths||[],queue:[],files:{},mime:a&&a.mime,push:function(b,c){var e=this;this.queue.push(b),d.Parser.importer(b,this.paths,function(a){e.queue.splice(e.queue.indexOf(b),1),e.files[b]=a,c(a),e.queue.length===0&&n()},a)}};this.env=a=a||{},this.optimization="optimization"in this.env?this.env.optimization:1,this.env.filename=this.env.filename||null;return l={imports:o,parse:function(d,g){var h,l,m,o,p,q,r=[],t,u=null;c=f=k=i=0,j=[],b=d.replace(/\r\n/g,"\n"),j=function(c){var d=0,e=/[^"'`\{\}\/\(\)]+/g,f=/\/\*(?:[^*]|\*+[^\/*])*\*+\/|\/\/.*/g,g=0,h,i=c[0],j,k;for(var l=0,m,n;l0)throw{type:"Syntax",message:"Missing closing `}`",filename:a.filename};return c.map(function(a){return a.join("")})}([[]]),h=new e.Ruleset([],s(this.parsers.primary)),h.root=!0,h.toCSS=function(c){var d,f,g;return function(g,h){function n(a){return a?(b.slice(0,a).match(/\n/g)||"").length:null}var i=[];g=g||{},typeof h=="object"&&!Array.isArray(h)&&(h=Object.keys(h).map(function(a){var b=h[a];b instanceof e.Value||(b instanceof e.Expression||(b=new e.Expression([b])),b=new e.Value([b]));return new e.Rule("@"+a,b,!1,0)}),i=[new e.Ruleset(null,h)]);try{var j=c.call(this,{frames:i}).toCSS([],{compress:g.compress||!1})}catch(k){f=b.split("\n"),d=n(k.index);for(var l=k.index,m=-1;l>=0&&b.charAt(l)!=="\n";l--)m++;throw{type:k.type,message:k.message,filename:a.filename,index:k.index,line:typeof d=="number"?d+1:null,callLine:k.call&&n(k.call)+1,callExtract:f[n(k.call)],stack:k.stack,column:m,extract:[f[d-1],f[d],f[d+1]]}}return g.compress?j.replace(/(\s)+/g,"$1"):j}}(h.eval);if(c=0&&b.charAt(v)!=="\n";v--)w++;u={name:"ParseError",message:"Syntax Error on line 
"+p,index:c,filename:a.filename,line:p,column:w,extract:[q[p-2],q[p-1],q[p]]}}this.imports.queue.length>0?n=function(){g(u,h)}:g(u,h)},parsers:{primary:function(){var a,b=[];while((a=s(this.mixin.definition)||s(this.rule)||s(this.ruleset)||s(this.mixin.call)||s(this.comment)||s(this.directive))||s(/^[\s\n]+/))a&&b.push(a);return b},comment:function(){var a;if(b.charAt(c)==="/"){if(b.charAt(c+1)==="/")return new e.Comment(s(/^\/\/.*/),!0);if(a=s(/^\/\*(?:[^*]|\*+[^\/*])*\*+\/\n?/))return new e.Comment(a)}},entities:{quoted:function(){var a,d=c,f;b.charAt(d)==="~"&&(d++,f=!0);if(b.charAt(d)==='"'||b.charAt(d)==="'"){f&&s("~");if(a=s(/^"((?:[^"\\\r\n]|\\.)*)"|'((?:[^'\\\r\n]|\\.)*)'/))return new e.Quoted(a[0],a[1]||a[2],f)}},keyword:function(){var a;if(a=s(/^[A-Za-z-]+/))return new e.Keyword(a)},call:function(){var a,b;if(!!(a=/^([\w-]+|%)\(/.exec(j[f]))){a=a[1].toLowerCase();if(a==="url")return null;c+=a.length;if(a==="alpha")return s(this.alpha);s("("),b=s(this.entities.arguments);if(!s(")"))return;if(a)return new e.Call(a,b)}},arguments:function(){var a=[],b;while(b=s(this.expression)){a.push(b);if(!s(","))break}return a},literal:function(){return s(this.entities.dimension)||s(this.entities.color)||s(this.entities.quoted)},url:function(){var a;if(b.charAt(c)==="u"&&!!s(/^url\(/)){a=s(this.entities.quoted)||s(this.entities.variable)||s(this.entities.dataURI)||s(/^[-\w%@$\/.&=:;#+?~]+/)||"";if(!s(")"))throw new Error("missing closing ) for url()");return new e.URL(a.value||a.data||a instanceof e.Variable?a:new e.Anonymous(a),o.paths)}},dataURI:function(){var a;if(s(/^data:/)){a={},a.mime=s(/^[^\/]+\/[^,;)]+/)||"",a.charset=s(/^;\s*charset=[^,;)]+/)||"",a.base64=s(/^;\s*base64/)||"",a.data=s(/^,\s*[^)]+/);if(a.data)return a}},variable:function(){var a,d=c;if(b.charAt(c)==="@"&&(a=s(/^@@?[\w-]+/)))return new e.Variable(a,d)},color:function(){var a;if(b.charAt(c)==="#"&&(a=s(/^#([a-fA-F0-9]{6}|[a-fA-F0-9]{3})/)))return new e.Color(a[1])},dimension:function(){var 
a,d=b.charCodeAt(c);if(!(d>57||d<45||d===47))if(a=s(/^(-?\d*\.?\d+)(px|%|em|pc|ex|in|deg|s|ms|pt|cm|mm|rad|grad|turn)?/))return new e.Dimension(a[1],a[2])},javascript:function(){var a,d=c,f;b.charAt(d)==="~"&&(d++,f=!0);if(b.charAt(d)==="`"){f&&s("~");if(a=s(/^`([^`]*)`/))return new e.JavaScript(a[1],c,f)}}},variable:function(){var a;if(b.charAt(c)==="@"&&(a=s(/^(@[\w-]+)\s*:/)))return a[1]},shorthand:function(){var a,b;if(!!t(/^[@\w.%-]+\/[@\w.-]+/)&&(a=s(this.entity))&&s("/")&&(b=s(this.entity)))return new e.Shorthand(a,b)},mixin:{call:function(){var a=[],d,f,g,h=c,i=b.charAt(c);if(i==="."||i==="#"){while(d=s(/^[#.](?:[\w-]|\\(?:[a-fA-F0-9]{1,6} ?|[^a-fA-F0-9]))+/))a.push(new e.Element(f,d)),f=s(">");s("(")&&(g=s(this.entities.arguments))&&s(")");if(a.length>0&&(s(";")||t("}")))return new e.mixin.Call(a,g,h)}},definition:function(){var a,d=[],f,g,h,i;if(!(b.charAt(c)!=="."&&b.charAt(c)!=="#"||t(/^[^{]*(;|})/)))if(f=s(/^([#.](?:[\w-]|\\(?:[a-fA-F0-9]{1,6} ?|[^a-fA-F0-9]))+)\s*\(/)){a=f[1];while(h=s(this.entities.variable)||s(this.entities.literal)||s(this.entities.keyword)){if(h instanceof e.Variable)if(s(":"))if(i=s(this.expression))d.push({name:h.name,value:i});else throw new Error("Expected value");else d.push({name:h.name});else d.push({value:h});if(!s(","))break}if(!s(")"))throw new Error("Expected )");g=s(this.block);if(g)return new e.mixin.Definition(a,d,g)}}},entity:function(){return s(this.entities.literal)||s(this.entities.variable)||s(this.entities.url)||s(this.entities.call)||s(this.entities.keyword)||s(this.entities.javascript)||s(this.comment)},end:function(){return s(";")||t("}")},alpha:function(){var a;if(!!s(/^opacity=/i))if(a=s(/^\d+/)||s(this.entities.variable)){if(!s(")"))throw new Error("missing closing ) for alpha()");return new e.Alpha(a)}},element:function(){var a,b,c;c=s(this.combinator),a=s(/^(?:[.#]?|:*)(?:[\w-]|\\(?:[a-fA-F0-9]{1,6} ?|[^a-fA-F0-9]))+/)||s("*")||s(this.attribute)||s(/^\([^)@]+\)/);if(a)return new 
e.Element(c,a)},combinator:function(){var a,d=b.charAt(c);if(d===">"||d==="&"||d==="+"||d==="~"){c++;while(b.charAt(c)===" ")c++;return new e.Combinator(d)}if(d===":"&&b.charAt(c+1)===":"){c+=2;while(b.charAt(c)===" ")c++;return new e.Combinator("::")}return b.charAt(c-1)===" "?new e.Combinator(" "):new e.Combinator(null)},selector:function(){var a,d,f=[],g,h;while(d=s(this.element)){g=b.charAt(c),f.push(d);if(g==="{"||g==="}"||g===";"||g===",")break}if(f.length>0)return new e.Selector(f)},tag:function(){return s(/^[a-zA-Z][a-zA-Z-]*[0-9]?/)||s("*")},attribute:function(){var a="",b,c,d;if(!!s("[")){if(b=s(/^[a-zA-Z-]+/)||s(this.entities.quoted))(d=s(/^[|~*$^]?=/))&&(c=s(this.entities.quoted)||s(/^[\w-]+/))?a=[b,d,c.toCSS?c.toCSS():c].join(""):a=b;if(!s("]"))return;if(a)return"["+a+"]"}},block:function(){var a;if(s("{")&&(a=s(this.primary))&&s("}"))return a},ruleset:function(){var a=[],b,d,g;p();if(g=/^([.#: \w-]+)[\s\n]*\{/.exec(j[f]))c+=g[0].length-1,a=[new e.Selector([new e.Element(null,g[1])])];else while(b=s(this.selector)){a.push(b),s(this.comment);if(!s(","))break;s(this.comment)}if(a.length>0&&(d=s(this.block)))return new e.Ruleset(a,d);i=c,q()},rule:function(){var a,d,g=b.charAt(c),k,l;p();if(g!=="."&&g!=="#"&&g!=="&")if(a=s(this.variable)||s(this.property)){a.charAt(0)!="@"&&(l=/^([^@+\/'"*`(;{}-]*);/.exec(j[f]))?(c+=l[0].length-1,d=new e.Anonymous(l[1])):a==="font"?d=s(this.font):d=s(this.value),k=s(this.important);if(d&&s(this.end))return new e.Rule(a,d,k,h);i=c,q()}},"import":function(){var a;if(s(/^@import\s+/)&&(a=s(this.entities.quoted)||s(this.entities.url))&&s(";"))return new e.Import(a,o)},directive:function(){var a,d,f,g;if(b.charAt(c)==="@"){if(d=s(this["import"]))return d;if(a=s(/^@media|@page|@-[-a-z]+/)){g=(s(/^[^{]+/)||"").trim();if(f=s(this.block))return new e.Directive(a+" "+g,f)}else if(a=s(/^@[-a-z]+/))if(a==="@font-face"){if(f=s(this.block))return new e.Directive(a,f)}else if((d=s(this.entity))&&s(";"))return new 
e.Directive(a,d)}},font:function(){var a=[],b=[],c,d,f,g;while(g=s(this.shorthand)||s(this.entity))b.push(g);a.push(new e.Expression(b));if(s(","))while(g=s(this.expression)){a.push(g);if(!s(","))break}return new e.Value(a)},value:function(){var a,b=[],c;while(a=s(this.expression)){b.push(a);if(!s(","))break}if(b.length>0)return new e.Value(b)},important:function(){if(b.charAt(c)==="!")return s(/^! *important/)},sub:function(){var a;if(s("(")&&(a=s(this.expression))&&s(")"))return a},multiplication:function(){var a,b,c,d;if(a=s(this.operand)){while((c=s("/")||s("*"))&&(b=s(this.operand)))d=new e.Operation(c,[d||a,b]);return d||a}},addition:function(){var a,d,f,g;if(a=s(this.multiplication)){while((f=s(/^[-+]\s+/)||b.charAt(c-1)!=" "&&(s("+")||s("-")))&&(d=s(this.multiplication)))g=new e.Operation(f,[g||a,d]);return g||a}},operand:function(){var a,d=b.charAt(c+1);b.charAt(c)==="-"&&(d==="@"||d==="(")&&(a=s("-"));var f=s(this.sub)||s(this.entities.dimension)||s(this.entities.color)||s(this.entities.variable)||s(this.entities.call);return a?new e.Operation("*",[new e.Dimension(-1),f]):f},expression:function(){var a,b,c=[],d;while(a=s(this.addition)||s(this.entity))c.push(a);if(c.length>0)return new e.Expression(c)},property:function(){var a;if(a=s(/^(\*?-?[-a-z_0-9]+)\s*:/))return a[1]}}}},typeof a!="undefined"&&(d.Parser.importer=function(a,b,c,d){a.charAt(0)!=="/"&&b.length>0&&(a=b[0]+a),o({href:a,title:a,type:d.mime},c,!0)}),function(a){function d(a){return Math.min(1,Math.max(0,a))}function c(b){if(b instanceof a.Dimension)return parseFloat(b.unit=="%"?b.value/100:b.value);if(typeof b=="number")return b;throw{error:"RuntimeError",message:"color functions take numbers as parameters"}}function b(b){return a.functions.hsla(b.h,b.s,b.l,b.a)}a.functions={rgb:function(a,b,c){return this.rgba(a,b,c,1)},rgba:function(b,d,e,f){var g=[b,d,e].map(function(a){return c(a)}),f=c(f);return new a.Color(g,f)},hsl:function(a,b,c){return 
this.hsla(a,b,c,1)},hsla:function(a,b,d,e){function h(a){a=a<0?a+1:a>1?a-1:a;return a*6<1?g+(f-g)*a*6:a*2<1?f:a*3<2?g+(f-g)*(2/3-a)*6:g}a=c(a)%360/360,b=c(b),d=c(d),e=c(e);var f=d<=.5?d*(b+1):d+b-d*b,g=d*2-f;return this.rgba(h(a+1/3)*255,h(a)*255,h(a-1/3)*255,e)},hue:function(b){return new a.Dimension(Math.round(b.toHSL().h))},saturation:function(b){return new a.Dimension(Math.round(b.toHSL().s*100),"%")},lightness:function(b){return new a.Dimension(Math.round(b.toHSL().l*100),"%")},alpha:function(b){return new a.Dimension(b.toHSL().a)},saturate:function(a,c){var e=a.toHSL();e.s+=c.value/100,e.s=d(e.s);return b(e)},desaturate:function(a,c){var e=a.toHSL();e.s-=c.value/100,e.s=d(e.s);return b(e)},lighten:function(a,c){var e=a.toHSL();e.l+=c.value/100,e.l=d(e.l);return b(e)},darken:function(a,c){var e=a.toHSL();e.l-=c.value/100,e.l=d(e.l);return b(e)},fadein:function(a,c){var e=a.toHSL();e.a+=c.value/100,e.a=d(e.a);return b(e)},fadeout:function(a,c){var e=a.toHSL();e.a-=c.value/100,e.a=d(e.a);return b(e)},spin:function(a,c){var d=a.toHSL(),e=(d.h+c.value)%360;d.h=e<0?360+e:e;return b(d)},mix:function(b,c,d){var e=d.value/100,f=e*2-1,g=b.toHSL().a-c.toHSL().a,h=((f*g==-1?f:(f+g)/(1+f*g))+1)/2,i=1-h,j=[b.rgb[0]*h+c.rgb[0]*i,b.rgb[1]*h+c.rgb[1]*i,b.rgb[2]*h+c.rgb[2]*i],k=b.alpha*e+c.alpha*(1-e);return new a.Color(j,k)},greyscale:function(b){return this.desaturate(b,new a.Dimension(100))},e:function(b){return new a.Anonymous(b instanceof a.JavaScript?b.evaluated:b)},escape:function(b){return new a.Anonymous(encodeURI(b.value).replace(/=/g,"%3D").replace(/:/g,"%3A").replace(/#/g,"%23").replace(/;/g,"%3B").replace(/\(/g,"%28").replace(/\)/g,"%29"))},"%":function(b){var c=Array.prototype.slice.call(arguments,1),d=b.value;for(var e=0;e255?255:a<0?0:a).toString(16);return a.length===1?"0"+a:a}).join("")},operate:function(b,c){var d=[];c instanceof a.Color||(c=c.toColor());for(var e=0;e<3;e++)d[e]=a.operate(b,this.rgb[e],c.rgb[e]);return new 
a.Color(d,this.alpha+c.alpha)},toHSL:function(){var a=this.rgb[0]/255,b=this.rgb[1]/255,c=this.rgb[2]/255,d=this.alpha,e=Math.max(a,b,c),f=Math.min(a,b,c),g,h,i=(e+f)/2,j=e-f;if(e===f)g=h=0;else{h=i>.5?j/(2-e-f):j/(e+f);switch(e){case a:g=(b-c)/j+(b":a.compress?">":" > "}[this.value]}}(c("less/tree")),function(a){a.Expression=function(a){this.value=a},a.Expression.prototype={eval:function(b){return this.value.length>1?new a.Expression(this.value.map(function(a){return a.eval(b)})):this.value[0].eval(b)},toCSS:function(a){return this.value.map(function(b){return b.toCSS(a)}).join(" ")}}}(c("less/tree")),function(a){a.Import=function(b,c){var d=this;this._path=b,b instanceof a.Quoted?this.path=/\.(le?|c)ss$/.test(b.value)?b.value:b.value+".less":this.path=b.value.value||b.value,this.css=/css$/.test(this.path),this.css||c.push(this.path,function(a){if(!a)throw new Error("Error parsing "+d.path);d.root=a})},a.Import.prototype={toCSS:function(){return this.css?"@import "+this._path.toCSS()+";\n":""},eval:function(b){var c;if(this.css)return this;c=new a.Ruleset(null,this.root.rules.slice(0));for(var d=0;d0){for(var f=0;f0&&c>this.params.length)return!1;d=Math.min(c,this.arity);for(var e=0;e1?Array.prototype.push.apply(d,e.find(new a.Selector(b.elements.slice(1)),c)):d.push(e);break}});return this._lookups[g]=d},toCSS:function(b,c){var d=[],e=[],f=[],g=[],h,i;if(!this.root)if(b.length===0)g=this.selectors.map(function(a){return[a]});else for(var j=0;j0&&(h=g.map(function(a){return a.map(function(a){return a.toCSS(c)}).join("").trim()}).join(c.compress?",":g.length>3?",\n":", "),d.push(h,(c.compress?"{":" {\n ")+e.join(c.compress?"":"\n ")+(c.compress?"}":"\n}\n"))),d.push(f);return d.join("")+(c.compress?"\n":"")}}}(c("less/tree")),function(a){a.Selector=function(a){this.elements=a,this.elements[0].combinator.value===""&&(this.elements[0].combinator.value=" ")},a.Selector.prototype.match=function(a){return 
this.elements[0].value===a.elements[0].value?!0:!1},a.Selector.prototype.toCSS=function(a){if(this._css)return this._css;return this._css=this.elements.map(function(b){return typeof b=="string"?" "+b.trim():b.toCSS(a)}).join("")}}(c("less/tree")),function(b){b.URL=function(b,c){b.data?this.attrs=b:(!/^(?:https?:\/|file:\/|data:\/)?\//.test(b.value)&&c.length>0&&typeof a!="undefined"&&(b.value=c[0]+(b.value.charAt(0)==="/"?b.value.slice(1):b.value)),this.value=b,this.paths=c)},b.URL.prototype={toCSS:function(){return"url("+(this.attrs?"data:"+this.attrs.mime+this.attrs.charset+this.attrs.base64+this.attrs.data:this.value.toCSS())+")"},eval:function(a){return this.attrs?this:new b.URL(this.value.eval(a),this.paths)}}}(c("less/tree")),function(a){a.Value=function(a){this.value=a,this.is="value"},a.Value.prototype={eval:function(b){return this.value.length===1?this.value[0].eval(b):new a.Value(this.value.map(function(a){return a.eval(b)}))},toCSS:function(a){return this.value.map(function(b){return b.toCSS(a)}).join(a.compress?",":", ")}}}(c("less/tree")),function(a){a.Variable=function(a,b){this.name=a,this.index=b},a.Variable.prototype={eval:function(b){var c,d,e=this.name;e.indexOf("@@")==0&&(e="@"+(new a.Variable(e.slice(1))).eval(b).value);if(c=a.find(b.frames,function(a){if(d=a.variable(e))return d.value.eval(b)}))return c;throw{message:"variable "+e+" is undefined",index:this.index}}}}(c("less/tree")),c("less/tree").find=function(a,b){for(var c=0,d;c0||g?"development":"production"),d.async=!1,d.poll=d.poll||(g?1e3:1500),d.watch=function(){return this.watchMode=!0},d.unwatch=function(){return this.watchMode=!1},d.env==="development"?(d.optimization=0,/!watch/.test(location.hash)&&d.watch(),d.watchTimer=setInterval(function(){d.watchMode&&n(function(a,b,c){a&&q(a.toCSS(),b,c.lastModified)})},d.poll)):d.optimization=3;var h;try{h=typeof a.localStorage=="undefined"?null:a.localStorage}catch(i){h=null}var 
j=document.getElementsByTagName("link"),k=/^text\/(x-)?less$/;d.sheets=[];for(var l=0;l>> 0; + for (var i = 0; i < len; i++) { + if (i in this) { + block.call(thisObject, this[i], i, this); + } + } + }; +} +if (!Array.prototype.map) { + Array.prototype.map = function(fun /*, thisp*/) { + var len = this.length >>> 0; + var res = new Array(len); + var thisp = arguments[1]; + + for (var i = 0; i < len; i++) { + if (i in this) { + res[i] = fun.call(thisp, this[i], i, this); + } + } + return res; + }; +} +if (!Array.prototype.filter) { + Array.prototype.filter = function (block /*, thisp */) { + var values = []; + var thisp = arguments[1]; + for (var i = 0; i < this.length; i++) { + if (block.call(thisp, this[i])) { + values.push(this[i]); + } + } + return values; + }; +} +if (!Array.prototype.reduce) { + Array.prototype.reduce = function(fun /*, initial*/) { + var len = this.length >>> 0; + var i = 0; + + // no value to return if no initial value and an empty array + if (len === 0 && arguments.length === 1) throw new TypeError(); + + if (arguments.length >= 2) { + var rv = arguments[1]; + } else { + do { + if (i in this) { + rv = this[i++]; + break; + } + // if array contains no values, no initial value to return + if (++i >= len) throw new TypeError(); + } while (true); + } + for (; i < len; i++) { + if (i in this) { + rv = fun.call(null, rv, this[i], i, this); + } + } + return rv; + }; +} +if (!Array.prototype.indexOf) { + Array.prototype.indexOf = function (value /*, fromIndex */ ) { + var length = this.length; + var i = arguments[1] || 0; + + if (!length) return -1; + if (i >= length) return -1; + if (i < 0) i += length; + + for (; i < length; i++) { + if (!Object.prototype.hasOwnProperty.call(this, i)) { continue } + if (value === this[i]) return i; + } + return -1; + }; +} + +// +// Object +// +if (!Object.keys) { + Object.keys = function (object) { + var keys = []; + for (var name in object) { + if (Object.prototype.hasOwnProperty.call(object, name)) { + 
keys.push(name); + } + } + return keys; + }; +} + +// +// String +// +if (!String.prototype.trim) { + String.prototype.trim = function () { + return String(this).replace(/^\s\s*/, '').replace(/\s\s*$/, ''); + }; +} +var less, tree; + +if (typeof(window) === 'undefined') { + less = exports, + tree = require('less/tree'); +} else { + if (typeof(window.less) === 'undefined') { window.less = {} } + less = window.less, + tree = window.less.tree = {}; +} +// +// less.js - parser +// +// A relatively straight-forward predictive parser. +// There is no tokenization/lexing stage, the input is parsed +// in one sweep. +// +// To make the parser fast enough to run in the browser, several +// optimization had to be made: +// +// - Matching and slicing on a huge input is often cause of slowdowns. +// The solution is to chunkify the input into smaller strings. +// The chunks are stored in the `chunks` var, +// `j` holds the current chunk index, and `current` holds +// the index of the current chunk in relation to `input`. +// This gives us an almost 4x speed-up. +// +// - In many cases, we don't need to match individual tokens; +// for example, if a value doesn't hold any variables, operations +// or dynamic references, the parser can effectively 'skip' it, +// treating it as a literal. +// An example would be '1px solid #000' - which evaluates to itself, +// we don't need to know what the individual components are. +// The drawback, of course is that you don't get the benefits of +// syntax-checking on the CSS. This gives us a 50% speed-up in the parser, +// and a smaller speed-up in the code-gen. +// +// +// Token matching is done with the `$` function, which either takes +// a terminal string or regexp, or a non-terminal function to call. +// It also takes care of moving all the indices forwards. 
+// +// +less.Parser = function Parser(env) { + var input, // LeSS input string + i, // current index in `input` + j, // current chunk + temp, // temporarily holds a chunk's state, for backtracking + memo, // temporarily holds `i`, when backtracking + furthest, // furthest index the parser has gone to + chunks, // chunkified input + current, // index of current chunk, in `input` + parser; + + var that = this; + + // This function is called after all files + // have been imported through `@import`. + var finish = function () {}; + + var imports = this.imports = { + paths: env && env.paths || [], // Search paths, when importing + queue: [], // Files which haven't been imported yet + files: {}, // Holds the imported parse trees + mime: env && env.mime, // MIME type of .less files + push: function (path, callback) { + var that = this; + this.queue.push(path); + + // + // Import a file asynchronously + // + less.Parser.importer(path, this.paths, function (root) { + that.queue.splice(that.queue.indexOf(path), 1); // Remove the path from the queue + that.files[path] = root; // Store the root + + callback(root); + + if (that.queue.length === 0) { finish() } // Call `finish` if we're done importing + }, env); + } + }; + + function save() { temp = chunks[j], memo = i, current = i } + function restore() { chunks[j] = temp, i = memo, current = i } + + function sync() { + if (i > current) { + chunks[j] = chunks[j].slice(i - current); + current = i; + } + } + // + // Parse from a token, regexp or string, and move forward if match + // + function $(tok) { + var match, args, length, c, index, endIndex, k, mem; + + // + // Non-terminal + // + if (tok instanceof Function) { + return tok.call(parser.parsers); + // + // Terminal + // + // Either match a single character in the input, + // or match a regexp in the current chunk (chunk[j]). + // + } else if (typeof(tok) === 'string') { + match = input.charAt(i) === tok ? 
tok : null; + length = 1; + sync (); + } else { + sync (); + + if (match = tok.exec(chunks[j])) { + length = match[0].length; + } else { + return null; + } + } + + // The match is confirmed, add the match length to `i`, + // and consume any extra white-space characters (' ' || '\n') + // which come after that. The reason for this is that LeSS's + // grammar is mostly white-space insensitive. + // + if (match) { + mem = i += length; + endIndex = i + chunks[j].length - length; + + while (i < endIndex) { + c = input.charCodeAt(i); + if (! (c === 32 || c === 10 || c === 9)) { break } + i++; + } + chunks[j] = chunks[j].slice(length + (i - mem)); + current = i; + + if (chunks[j].length === 0 && j < chunks.length - 1) { j++ } + + if(typeof(match) === 'string') { + return match; + } else { + return match.length === 1 ? match[0] : match; + } + } + } + + // Same as $(), but don't change the state of the parser, + // just return the match. + function peek(tok) { + if (typeof(tok) === 'string') { + return input.charAt(i) === tok; + } else { + if (tok.test(chunks[j])) { + return true; + } else { + return false; + } + } + } + + this.env = env = env || {}; + + // The optimization level dictates the thoroughness of the parser, + // the lower the number, the less nodes it will create in the tree. + // This could matter for debugging, or if you want to access + // the individual nodes in the tree. + this.optimization = ('optimization' in this.env) ? this.env.optimization : 1; + + this.env.filename = this.env.filename || null; + + // + // The Parser + // + return parser = { + + imports: imports, + // + // Parse an input string into an abstract syntax tree, + // call `callback` when done. + // + parse: function (str, callback) { + var root, start, end, zone, line, lines, buff = [], c, error = null; + + i = j = current = furthest = 0; + chunks = []; + input = str.replace(/\r\n/g, '\n'); + + // Split the input into chunks. 
+ chunks = (function (chunks) { + var j = 0, + skip = /[^"'`\{\}\/\(\)]+/g, + comment = /\/\*(?:[^*]|\*+[^\/*])*\*+\/|\/\/.*/g, + level = 0, + match, + chunk = chunks[0], + inParam, + inString; + + for (var i = 0, c, cc; i < input.length; i++) { + skip.lastIndex = i; + if (match = skip.exec(input)) { + if (match.index === i) { + i += match[0].length; + chunk.push(match[0]); + } + } + c = input.charAt(i); + comment.lastIndex = i; + + if (!inString && !inParam && c === '/') { + cc = input.charAt(i + 1); + if (cc === '/' || cc === '*') { + if (match = comment.exec(input)) { + if (match.index === i) { + i += match[0].length; + chunk.push(match[0]); + c = input.charAt(i); + } + } + } + } + + if (c === '{' && !inString && !inParam) { level ++; + chunk.push(c); + } else if (c === '}' && !inString && !inParam) { level --; + chunk.push(c); + chunks[++j] = chunk = []; + } else if (c === '(' && !inString && !inParam) { + chunk.push(c); + inParam = true; + } else if (c === ')' && !inString && inParam) { + chunk.push(c); + inParam = false; + } else { + if (c === '"' || c === "'" || c === '`') { + if (! inString) { + inString = c; + } else { + inString = inString === c ? false : inString; + } + } + chunk.push(c); + } + } + if (level > 0) { + throw { + type: 'Syntax', + message: "Missing closing `}`", + filename: env.filename + }; + } + + return chunks.map(function (c) { return c.join('') });; + })([[]]); + + // Start with the primary rule. + // The whole syntax tree is held under a Ruleset node, + // with the `root` property set to true, so no `{}` are + // output. The callback is called when the input is parsed. 
+ root = new(tree.Ruleset)([], $(this.parsers.primary)); + root.root = true; + + root.toCSS = (function (evaluate) { + var line, lines, column; + + return function (options, variables) { + var frames = []; + + options = options || {}; + // + // Allows setting variables with a hash, so: + // + // `{ color: new(tree.Color)('#f01') }` will become: + // + // new(tree.Rule)('@color', + // new(tree.Value)([ + // new(tree.Expression)([ + // new(tree.Color)('#f01') + // ]) + // ]) + // ) + // + if (typeof(variables) === 'object' && !Array.isArray(variables)) { + variables = Object.keys(variables).map(function (k) { + var value = variables[k]; + + if (! (value instanceof tree.Value)) { + if (! (value instanceof tree.Expression)) { + value = new(tree.Expression)([value]); + } + value = new(tree.Value)([value]); + } + return new(tree.Rule)('@' + k, value, false, 0); + }); + frames = [new(tree.Ruleset)(null, variables)]; + } + + try { + var css = evaluate.call(this, { frames: frames }) + .toCSS([], { compress: options.compress || false }); + } catch (e) { + lines = input.split('\n'); + line = getLine(e.index); + + for (var n = e.index, column = -1; + n >= 0 && input.charAt(n) !== '\n'; + n--) { column++ } + + throw { + type: e.type, + message: e.message, + filename: env.filename, + index: e.index, + line: typeof(line) === 'number' ? line + 1 : null, + callLine: e.call && (getLine(e.call) + 1), + callExtract: lines[getLine(e.call)], + stack: e.stack, + column: column, + extract: [ + lines[line - 1], + lines[line], + lines[line + 1] + ] + }; + } + if (options.compress) { + return css.replace(/(\s)+/g, "$1"); + } else { + return css; + } + + function getLine(index) { + return index ? (input.slice(0, index).match(/\n/g) || "").length : null; + } + }; + })(root.eval); + + // If `i` is smaller than the `input.length - 1`, + // it means the parser wasn't able to parse the whole + // string, so we've got a parsing error. 
+ // + // We try to extract a \n delimited string, + // showing the line where the parse error occured. + // We split it up into two parts (the part which parsed, + // and the part which didn't), so we can color them differently. + if (i < input.length - 1) { + i = furthest; + lines = input.split('\n'); + line = (input.slice(0, i).match(/\n/g) || "").length + 1; + + for (var n = i, column = -1; n >= 0 && input.charAt(n) !== '\n'; n--) { column++ } + + error = { + name: "ParseError", + message: "Syntax Error on line " + line, + index: i, + filename: env.filename, + line: line, + column: column, + extract: [ + lines[line - 2], + lines[line - 1], + lines[line] + ] + }; + } + + if (this.imports.queue.length > 0) { + finish = function () { callback(error, root) }; + } else { + callback(error, root); + } + }, + + // + // Here in, the parsing rules/functions + // + // The basic structure of the syntax tree generated is as follows: + // + // Ruleset -> Rule -> Value -> Expression -> Entity + // + // Here's some LESS code: + // + // .class { + // color: #fff; + // border: 1px solid #000; + // width: @w + 4px; + // > .child {...} + // } + // + // And here's what the parse tree might look like: + // + // Ruleset (Selector '.class', [ + // Rule ("color", Value ([Expression [Color #fff]])) + // Rule ("border", Value ([Expression [Dimension 1px][Keyword "solid"][Color #000]])) + // Rule ("width", Value ([Expression [Operation "+" [Variable "@w"][Dimension 4px]]])) + // Ruleset (Selector [Element '>', '.child'], [...]) + // ]) + // + // In general, most rules will try to parse a token with the `$()` function, and if the return + // value is truly, will return a new node, of the relevant type. Sometimes, we need to check + // first, before parsing, that's when we use `peek()`. + // + parsers: { + // + // The `primary` rule is the *entry* and *exit* point of the parser. + // The rules here can appear at any level of the parse tree. 
+ // + // The recursive nature of the grammar is an interplay between the `block` + // rule, which represents `{ ... }`, the `ruleset` rule, and this `primary` rule, + // as represented by this simplified grammar: + // + // primary → (ruleset | rule)+ + // ruleset → selector+ block + // block → '{' primary '}' + // + // Only at one point is the primary rule not called from the + // block rule: at the root level. + // + primary: function () { + var node, root = []; + + while ((node = $(this.mixin.definition) || $(this.rule) || $(this.ruleset) || + $(this.mixin.call) || $(this.comment) || $(this.directive)) + || $(/^[\s\n]+/)) { + node && root.push(node); + } + return root; + }, + + // We create a Comment node for CSS comments `/* */`, + // but keep the LeSS comments `//` silent, by just skipping + // over them. + comment: function () { + var comment; + + if (input.charAt(i) !== '/') return; + + if (input.charAt(i + 1) === '/') { + return new(tree.Comment)($(/^\/\/.*/), true); + } else if (comment = $(/^\/\*(?:[^*]|\*+[^\/*])*\*+\/\n?/)) { + return new(tree.Comment)(comment); + } + }, + + // + // Entities are tokens which can be found inside an Expression + // + entities: { + // + // A string, which supports escaping " and ' + // + // "milky way" 'he\'s the one!' + // + quoted: function () { + var str, j = i, e; + + if (input.charAt(j) === '~') { j++, e = true } // Escaped strings + if (input.charAt(j) !== '"' && input.charAt(j) !== "'") return; + + e && $('~'); + + if (str = $(/^"((?:[^"\\\r\n]|\\.)*)"|'((?:[^'\\\r\n]|\\.)*)'/)) { + return new(tree.Quoted)(str[0], str[1] || str[2], e); + } + }, + + // + // A catch-all word, such as: + // + // black border-collapse + // + keyword: function () { + var k; + if (k = $(/^[A-Za-z-]+/)) { return new(tree.Keyword)(k) } + }, + + // + // A function call + // + // rgb(255, 0, 255) + // + // We also try to catch IE's `alpha()`, but let the `alpha` parser + // deal with the details. 
+ // + // The arguments are parsed with the `entities.arguments` parser. + // + call: function () { + var name, args; + + if (! (name = /^([\w-]+|%)\(/.exec(chunks[j]))) return; + + name = name[1].toLowerCase(); + + if (name === 'url') { return null } + else { i += name.length } + + if (name === 'alpha') { return $(this.alpha) } + + $('('); // Parse the '(' and consume whitespace. + + args = $(this.entities.arguments); + + if (! $(')')) return; + + if (name) { return new(tree.Call)(name, args) } + }, + arguments: function () { + var args = [], arg; + + while (arg = $(this.expression)) { + args.push(arg); + if (! $(',')) { break } + } + return args; + }, + literal: function () { + return $(this.entities.dimension) || + $(this.entities.color) || + $(this.entities.quoted); + }, + + // + // Parse url() tokens + // + // We use a specific rule for urls, because they don't really behave like + // standard function calls. The difference is that the argument doesn't have + // to be enclosed within a string, so it can't be parsed as an Expression. + // + url: function () { + var value; + + if (input.charAt(i) !== 'u' || !$(/^url\(/)) return; + value = $(this.entities.quoted) || $(this.entities.variable) || + $(this.entities.dataURI) || $(/^[-\w%@$\/.&=:;#+?~]+/) || ""; + if (! $(')')) throw new(Error)("missing closing ) for url()"); + + return new(tree.URL)((value.value || value.data || value instanceof tree.Variable) + ? value : new(tree.Anonymous)(value), imports.paths); + }, + + dataURI: function () { + var obj; + + if ($(/^data:/)) { + obj = {}; + obj.mime = $(/^[^\/]+\/[^,;)]+/) || ''; + obj.charset = $(/^;\s*charset=[^,;)]+/) || ''; + obj.base64 = $(/^;\s*base64/) || ''; + obj.data = $(/^,\s*[^)]+/); + + if (obj.data) { return obj } + } + }, + + // + // A Variable entity, such as `@fink`, in + // + // width: @fink + 2px + // + // We use a different parser for variable definitions, + // see `parsers.variable`. 
+ // + variable: function () { + var name, index = i; + + if (input.charAt(i) === '@' && (name = $(/^@@?[\w-]+/))) { + return new(tree.Variable)(name, index); + } + }, + + // + // A Hexadecimal color + // + // #4F3C2F + // + // `rgb` and `hsl` colors are parsed through the `entities.call` parser. + // + color: function () { + var rgb; + + if (input.charAt(i) === '#' && (rgb = $(/^#([a-fA-F0-9]{6}|[a-fA-F0-9]{3})/))) { + return new(tree.Color)(rgb[1]); + } + }, + + // + // A Dimension, that is, a number and a unit + // + // 0.5em 95% + // + dimension: function () { + var value, c = input.charCodeAt(i); + if ((c > 57 || c < 45) || c === 47) return; + + if (value = $(/^(-?\d*\.?\d+)(px|%|em|pc|ex|in|deg|s|ms|pt|cm|mm|rad|grad|turn)?/)) { + return new(tree.Dimension)(value[1], value[2]); + } + }, + + // + // JavaScript code to be evaluated + // + // `window.location.href` + // + javascript: function () { + var str, j = i, e; + + if (input.charAt(j) === '~') { j++, e = true } // Escaped strings + if (input.charAt(j) !== '`') { return } + + e && $('~'); + + if (str = $(/^`([^`]*)`/)) { + return new(tree.JavaScript)(str[1], i, e); + } + } + }, + + // + // The variable part of a variable definition. Used in the `rule` parser + // + // @fink: + // + variable: function () { + var name; + + if (input.charAt(i) === '@' && (name = $(/^(@[\w-]+)\s*:/))) { return name[1] } + }, + + // + // A font size/line-height shorthand + // + // small/12px + // + // We need to peek first, or we'll match on keywords and dimensions + // + shorthand: function () { + var a, b; + + if (! 
peek(/^[@\w.%-]+\/[@\w.-]+/)) return; + + if ((a = $(this.entity)) && $('/') && (b = $(this.entity))) { + return new(tree.Shorthand)(a, b); + } + }, + + // + // Mixins + // + mixin: { + // + // A Mixin call, with an optional argument list + // + // #mixins > .square(#fff); + // .rounded(4px, black); + // .button; + // + // The `while` loop is there because mixins can be + // namespaced, but we only support the child and descendant + // selector for now. + // + call: function () { + var elements = [], e, c, args, index = i, s = input.charAt(i); + + if (s !== '.' && s !== '#') { return } + + while (e = $(/^[#.](?:[\w-]|\\(?:[a-fA-F0-9]{1,6} ?|[^a-fA-F0-9]))+/)) { + elements.push(new(tree.Element)(c, e)); + c = $('>'); + } + $('(') && (args = $(this.entities.arguments)) && $(')'); + + if (elements.length > 0 && ($(';') || peek('}'))) { + return new(tree.mixin.Call)(elements, args, index); + } + }, + + // + // A Mixin definition, with a list of parameters + // + // .rounded (@radius: 2px, @color) { + // ... + // } + // + // Until we have a finer grained state-machine, we have to + // do a look-ahead, to make sure we don't have a mixin call. + // See the `rule` function for more information. + // + // We start by matching `.rounded (`, and then proceed on to + // the argument list, which has optional default values. + // We store the parameters in `params`, with a `value` key, + // if there is a value, such as in the case of `@radius`. + // + // Once we've got our params list, and a closing `)`, we parse + // the `{...}` block. + // + definition: function () { + var name, params = [], match, ruleset, param, value; + + if ((input.charAt(i) !== '.' 
&& input.charAt(i) !== '#') || + peek(/^[^{]*(;|})/)) return; + + if (match = $(/^([#.](?:[\w-]|\\(?:[a-fA-F0-9]{1,6} ?|[^a-fA-F0-9]))+)\s*\(/)) { + name = match[1]; + + while (param = $(this.entities.variable) || $(this.entities.literal) + || $(this.entities.keyword)) { + // Variable + if (param instanceof tree.Variable) { + if ($(':')) { + if (value = $(this.expression)) { + params.push({ name: param.name, value: value }); + } else { + throw new(Error)("Expected value"); + } + } else { + params.push({ name: param.name }); + } + } else { + params.push({ value: param }); + } + if (! $(',')) { break } + } + if (! $(')')) throw new(Error)("Expected )"); + + ruleset = $(this.block); + + if (ruleset) { + return new(tree.mixin.Definition)(name, params, ruleset); + } + } + } + }, + + // + // Entities are the smallest recognized token, + // and can be found inside a rule's value. + // + entity: function () { + return $(this.entities.literal) || $(this.entities.variable) || $(this.entities.url) || + $(this.entities.call) || $(this.entities.keyword) || $(this.entities.javascript) || + $(this.comment); + }, + + // + // A Rule terminator. Note that we use `peek()` to check for '}', + // because the `block` rule will be expecting it, but we still need to make sure + // it's there, if ';' was ommitted. + // + end: function () { + return $(';') || peek('}'); + }, + + // + // IE's alpha function + // + // alpha(opacity=88) + // + alpha: function () { + var value; + + if (! $(/^opacity=/i)) return; + if (value = $(/^\d+/) || $(this.entities.variable)) { + if (! $(')')) throw new(Error)("missing closing ) for alpha()"); + return new(tree.Alpha)(value); + } + }, + + // + // A Selector Element + // + // div + // + h1 + // #socks + // input[type="text"] + // + // Elements are the building blocks for Selectors, + // they are made out of a `Combinator` (see combinator rule), + // and an element name, such as a tag a class, or `*`. 
+ // + element: function () { + var e, t, c; + + c = $(this.combinator); + e = $(/^(?:[.#]?|:*)(?:[\w-]|\\(?:[a-fA-F0-9]{1,6} ?|[^a-fA-F0-9]))+/) || $('*') || $(this.attribute) || $(/^\([^)@]+\)/); + + if (e) { return new(tree.Element)(c, e) } + }, + + // + // Combinators combine elements together, in a Selector. + // + // Because our parser isn't white-space sensitive, special care + // has to be taken, when parsing the descendant combinator, ` `, + // as it's an empty space. We have to check the previous character + // in the input, to see if it's a ` ` character. More info on how + // we deal with this in *combinator.js*. + // + combinator: function () { + var match, c = input.charAt(i); + + if (c === '>' || c === '&' || c === '+' || c === '~') { + i++; + while (input.charAt(i) === ' ') { i++ } + return new(tree.Combinator)(c); + } else if (c === ':' && input.charAt(i + 1) === ':') { + i += 2; + while (input.charAt(i) === ' ') { i++ } + return new(tree.Combinator)('::'); + } else if (input.charAt(i - 1) === ' ') { + return new(tree.Combinator)(" "); + } else { + return new(tree.Combinator)(null); + } + }, + + // + // A CSS Selector + // + // .class > div + h1 + // li a:hover + // + // Selectors are made out of one or more Elements, see above. + // + selector: function () { + var sel, e, elements = [], c, match; + + while (e = $(this.element)) { + c = input.charAt(i); + elements.push(e) + if (c === '{' || c === '}' || c === ';' || c === ',') { break } + } + + if (elements.length > 0) { return new(tree.Selector)(elements) } + }, + tag: function () { + return $(/^[a-zA-Z][a-zA-Z-]*[0-9]?/) || $('*'); + }, + attribute: function () { + var attr = '', key, val, op; + + if (! $('[')) return; + + if (key = $(/^[a-zA-Z-]+/) || $(this.entities.quoted)) { + if ((op = $(/^[|~*$^]?=/)) && + (val = $(this.entities.quoted) || $(/^[\w-]+/))) { + attr = [key, op, val.toCSS ? val.toCSS() : val].join(''); + } else { attr = key } + } + + if (! 
$(']')) return; + + if (attr) { return "[" + attr + "]" } + }, + + // + // The `block` rule is used by `ruleset` and `mixin.definition`. + // It's a wrapper around the `primary` rule, with added `{}`. + // + block: function () { + var content; + + if ($('{') && (content = $(this.primary)) && $('}')) { + return content; + } + }, + + // + // div, .class, body > p {...} + // + ruleset: function () { + var selectors = [], s, rules, match; + save(); + + if (match = /^([.#: \w-]+)[\s\n]*\{/.exec(chunks[j])) { + i += match[0].length - 1; + selectors = [new(tree.Selector)([new(tree.Element)(null, match[1])])]; + } else { + while (s = $(this.selector)) { + selectors.push(s); + $(this.comment); + if (! $(',')) { break } + $(this.comment); + } + } + + if (selectors.length > 0 && (rules = $(this.block))) { + return new(tree.Ruleset)(selectors, rules); + } else { + // Backtrack + furthest = i; + restore(); + } + }, + rule: function () { + var name, value, c = input.charAt(i), important, match; + save(); + + if (c === '.' || c === '#' || c === '&') { return } + + if (name = $(this.variable) || $(this.property)) { + if ((name.charAt(0) != '@') && (match = /^([^@+\/'"*`(;{}-]*);/.exec(chunks[j]))) { + i += match[0].length - 1; + value = new(tree.Anonymous)(match[1]); + } else if (name === "font") { + value = $(this.font); + } else { + value = $(this.value); + } + important = $(this.important); + + if (value && $(this.end)) { + return new(tree.Rule)(name, value, important, memo); + } else { + furthest = i; + restore(); + } + } + }, + + // + // An @import directive + // + // @import "lib"; + // + // Depending on our environemnt, importing is done differently: + // In the browser, it's an XHR request, in Node, it would be a + // file-system operation. The function used for importing is + // stored in `import`, which we pass to the Import constructor. 
+ // + "import": function () { + var path; + if ($(/^@import\s+/) && + (path = $(this.entities.quoted) || $(this.entities.url)) && + $(';')) { + return new(tree.Import)(path, imports); + } + }, + + // + // A CSS Directive + // + // @charset "utf-8"; + // + directive: function () { + var name, value, rules, types; + + if (input.charAt(i) !== '@') return; + + if (value = $(this['import'])) { + return value; + } else if (name = $(/^@media|@page|@-[-a-z]+/)) { + types = ($(/^[^{]+/) || '').trim(); + if (rules = $(this.block)) { + return new(tree.Directive)(name + " " + types, rules); + } + } else if (name = $(/^@[-a-z]+/)) { + if (name === '@font-face') { + if (rules = $(this.block)) { + return new(tree.Directive)(name, rules); + } + } else if ((value = $(this.entity)) && $(';')) { + return new(tree.Directive)(name, value); + } + } + }, + font: function () { + var value = [], expression = [], weight, shorthand, font, e; + + while (e = $(this.shorthand) || $(this.entity)) { + expression.push(e); + } + value.push(new(tree.Expression)(expression)); + + if ($(',')) { + while (e = $(this.expression)) { + value.push(e); + if (! $(',')) { break } + } + } + return new(tree.Value)(value); + }, + + // + // A Value is a comma-delimited list of Expressions + // + // font-family: Baskerville, Georgia, serif; + // + // In a Rule, a Value represents everything after the `:`, + // and before the `;`. + // + value: function () { + var e, expressions = [], important; + + while (e = $(this.expression)) { + expressions.push(e); + if (! $(',')) { break } + } + + if (expressions.length > 0) { + return new(tree.Value)(expressions); + } + }, + important: function () { + if (input.charAt(i) === '!') { + return $(/^! 
*important/); + } + }, + sub: function () { + var e; + + if ($('(') && (e = $(this.expression)) && $(')')) { + return e; + } + }, + multiplication: function () { + var m, a, op, operation; + if (m = $(this.operand)) { + while ((op = ($('/') || $('*'))) && (a = $(this.operand))) { + operation = new(tree.Operation)(op, [operation || m, a]); + } + return operation || m; + } + }, + addition: function () { + var m, a, op, operation; + if (m = $(this.multiplication)) { + while ((op = $(/^[-+]\s+/) || (input.charAt(i - 1) != ' ' && ($('+') || $('-')))) && + (a = $(this.multiplication))) { + operation = new(tree.Operation)(op, [operation || m, a]); + } + return operation || m; + } + }, + + // + // An operand is anything that can be part of an operation, + // such as a Color, or a Variable + // + operand: function () { + var negate, p = input.charAt(i + 1); + + if (input.charAt(i) === '-' && (p === '@' || p === '(')) { negate = $('-') } + var o = $(this.sub) || $(this.entities.dimension) || + $(this.entities.color) || $(this.entities.variable) || + $(this.entities.call); + return negate ? new(tree.Operation)('*', [new(tree.Dimension)(-1), o]) + : o; + }, + + // + // Expressions either represent mathematical operations, + // or white-space delimited Entities. + // + // 1px solid black + // @var * 2 + // + expression: function () { + var e, delim, entities = [], d; + + while (e = $(this.addition) || $(this.entity)) { + entities.push(e); + } + if (entities.length > 0) { + return new(tree.Expression)(entities); + } + }, + property: function () { + var name; + + if (name = $(/^(\*?-?[-a-z_0-9]+)\s*:/)) { + return name[1]; + } + } + } + }; +}; + +if (typeof(window) !== 'undefined') { + // + // Used by `@import` directives + // + less.Parser.importer = function (path, paths, callback, env) { + if (path.charAt(0) !== '/' && paths.length > 0) { + path = paths[0] + path; + } + // We pass `true` as 3rd argument, to force the reload of the import. 
+ // This is so we can get the syntax tree as opposed to just the CSS output, + // as we need this to evaluate the current stylesheet. + loadStyleSheet({ href: path, title: path, type: env.mime }, callback, true); + }; +} + +(function (tree) { + +tree.functions = { + rgb: function (r, g, b) { + return this.rgba(r, g, b, 1.0); + }, + rgba: function (r, g, b, a) { + var rgb = [r, g, b].map(function (c) { return number(c) }), + a = number(a); + return new(tree.Color)(rgb, a); + }, + hsl: function (h, s, l) { + return this.hsla(h, s, l, 1.0); + }, + hsla: function (h, s, l, a) { + h = (number(h) % 360) / 360; + s = number(s); l = number(l); a = number(a); + + var m2 = l <= 0.5 ? l * (s + 1) : l + s - l * s; + var m1 = l * 2 - m2; + + return this.rgba(hue(h + 1/3) * 255, + hue(h) * 255, + hue(h - 1/3) * 255, + a); + + function hue(h) { + h = h < 0 ? h + 1 : (h > 1 ? h - 1 : h); + if (h * 6 < 1) return m1 + (m2 - m1) * h * 6; + else if (h * 2 < 1) return m2; + else if (h * 3 < 2) return m1 + (m2 - m1) * (2/3 - h) * 6; + else return m1; + } + }, + hue: function (color) { + return new(tree.Dimension)(Math.round(color.toHSL().h)); + }, + saturation: function (color) { + return new(tree.Dimension)(Math.round(color.toHSL().s * 100), '%'); + }, + lightness: function (color) { + return new(tree.Dimension)(Math.round(color.toHSL().l * 100), '%'); + }, + alpha: function (color) { + return new(tree.Dimension)(color.toHSL().a); + }, + saturate: function (color, amount) { + var hsl = color.toHSL(); + + hsl.s += amount.value / 100; + hsl.s = clamp(hsl.s); + return hsla(hsl); + }, + desaturate: function (color, amount) { + var hsl = color.toHSL(); + + hsl.s -= amount.value / 100; + hsl.s = clamp(hsl.s); + return hsla(hsl); + }, + lighten: function (color, amount) { + var hsl = color.toHSL(); + + hsl.l += amount.value / 100; + hsl.l = clamp(hsl.l); + return hsla(hsl); + }, + darken: function (color, amount) { + var hsl = color.toHSL(); + + hsl.l -= amount.value / 100; + hsl.l = 
clamp(hsl.l); + return hsla(hsl); + }, + fadein: function (color, amount) { + var hsl = color.toHSL(); + + hsl.a += amount.value / 100; + hsl.a = clamp(hsl.a); + return hsla(hsl); + }, + fadeout: function (color, amount) { + var hsl = color.toHSL(); + + hsl.a -= amount.value / 100; + hsl.a = clamp(hsl.a); + return hsla(hsl); + }, + spin: function (color, amount) { + var hsl = color.toHSL(); + var hue = (hsl.h + amount.value) % 360; + + hsl.h = hue < 0 ? 360 + hue : hue; + + return hsla(hsl); + }, + // + // Copyright (c) 2006-2009 Hampton Catlin, Nathan Weizenbaum, and Chris Eppstein + // http://sass-lang.com + // + mix: function (color1, color2, weight) { + var p = weight.value / 100.0; + var w = p * 2 - 1; + var a = color1.toHSL().a - color2.toHSL().a; + + var w1 = (((w * a == -1) ? w : (w + a) / (1 + w * a)) + 1) / 2.0; + var w2 = 1 - w1; + + var rgb = [color1.rgb[0] * w1 + color2.rgb[0] * w2, + color1.rgb[1] * w1 + color2.rgb[1] * w2, + color1.rgb[2] * w1 + color2.rgb[2] * w2]; + + var alpha = color1.alpha * p + color2.alpha * (1 - p); + + return new(tree.Color)(rgb, alpha); + }, + greyscale: function (color) { + return this.desaturate(color, new(tree.Dimension)(100)); + }, + e: function (str) { + return new(tree.Anonymous)(str instanceof tree.JavaScript ? str.evaluated : str); + }, + escape: function (str) { + return new(tree.Anonymous)(encodeURI(str.value).replace(/=/g, "%3D").replace(/:/g, "%3A").replace(/#/g, "%23").replace(/;/g, "%3B").replace(/\(/g, "%28").replace(/\)/g, "%29")); + }, + '%': function (quoted /* arg, arg, ...*/) { + var args = Array.prototype.slice.call(arguments, 1), + str = quoted.value; + + for (var i = 0; i < args.length; i++) { + str = str.replace(/%[sda]/i, function(token) { + var value = token.match(/s/i) ? args[i].value : args[i].toCSS(); + return token.match(/[A-Z]$/) ? 
encodeURIComponent(value) : value; + }); + } + str = str.replace(/%%/g, '%'); + return new(tree.Quoted)('"' + str + '"', str); + }, + round: function (n) { + if (n instanceof tree.Dimension) { + return new(tree.Dimension)(Math.round(number(n)), n.unit); + } else if (typeof(n) === 'number') { + return Math.round(n); + } else { + throw { + error: "RuntimeError", + message: "math functions take numbers as parameters" + }; + } + } +}; + +function hsla(hsla) { + return tree.functions.hsla(hsla.h, hsla.s, hsla.l, hsla.a); +} + +function number(n) { + if (n instanceof tree.Dimension) { + return parseFloat(n.unit == '%' ? n.value / 100 : n.value); + } else if (typeof(n) === 'number') { + return n; + } else { + throw { + error: "RuntimeError", + message: "color functions take numbers as parameters" + }; + } +} + +function clamp(val) { + return Math.min(1, Math.max(0, val)); +} + +})(require('less/tree')); +(function (tree) { + +tree.Alpha = function (val) { + this.value = val; +}; +tree.Alpha.prototype = { + toCSS: function () { + return "alpha(opacity=" + + (this.value.toCSS ? this.value.toCSS() : this.value) + ")"; + }, + eval: function () { return this } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Anonymous = function (string) { + this.value = string.value || string; +}; +tree.Anonymous.prototype = { + toCSS: function () { + return this.value; + }, + eval: function () { return this } +}; + +})(require('less/tree')); +(function (tree) { + +// +// A function call node. +// +tree.Call = function (name, args) { + this.name = name; + this.args = args; +}; +tree.Call.prototype = { + // + // When evaluating a function call, + // we either find the function in `tree.functions` [1], + // in which case we call it, passing the evaluated arguments, + // or we simply print it out as it appeared originally [2]. + // + // The *functions.js* file contains the built-in functions. 
+ // + // The reason why we evaluate the arguments, is in the case where + // we try to pass a variable to a function, like: `saturate(@color)`. + // The function should receive the value, not the variable. + // + eval: function (env) { + var args = this.args.map(function (a) { return a.eval(env) }); + + if (this.name in tree.functions) { // 1. + return tree.functions[this.name].apply(tree.functions, args); + } else { // 2. + return new(tree.Anonymous)(this.name + + "(" + args.map(function (a) { return a.toCSS() }).join(', ') + ")"); + } + }, + + toCSS: function (env) { + return this.eval(env).toCSS(); + } +}; + +})(require('less/tree')); +(function (tree) { +// +// RGB Colors - #ff0014, #eee +// +tree.Color = function (rgb, a) { + // + // The end goal here, is to parse the arguments + // into an integer triplet, such as `128, 255, 0` + // + // This facilitates operations and conversions. + // + if (Array.isArray(rgb)) { + this.rgb = rgb; + } else if (rgb.length == 6) { + this.rgb = rgb.match(/.{2}/g).map(function (c) { + return parseInt(c, 16); + }); + } else if (rgb.length == 8) { + this.alpha = parseInt(rgb.substring(0,2), 16) / 255.0; + this.rgb = rgb.substr(2).match(/.{2}/g).map(function (c) { + return parseInt(c, 16); + }); + } else { + this.rgb = rgb.split('').map(function (c) { + return parseInt(c + c, 16); + }); + } + this.alpha = typeof(a) === 'number' ? a : 1; +}; +tree.Color.prototype = { + eval: function () { return this }, + + // + // If we have some transparency, the only way to represent it + // is via `rgba`. Otherwise, we use the hex representation, + // which has better compatibility with older browsers. + // Values are capped between `0` and `255`, rounded and zero-padded. + // + toCSS: function () { + if (this.alpha < 1.0) { + return "rgba(" + this.rgb.map(function (c) { + return Math.round(c); + }).concat(this.alpha).join(', ') + ")"; + } else { + return '#' + this.rgb.map(function (i) { + i = Math.round(i); + i = (i > 255 ? 255 : (i < 0 ? 
0 : i)).toString(16); + return i.length === 1 ? '0' + i : i; + }).join(''); + } + }, + + // + // Operations have to be done per-channel, if not, + // channels will spill onto each other. Once we have + // our result, in the form of an integer triplet, + // we create a new Color node to hold the result. + // + operate: function (op, other) { + var result = []; + + if (! (other instanceof tree.Color)) { + other = other.toColor(); + } + + for (var c = 0; c < 3; c++) { + result[c] = tree.operate(op, this.rgb[c], other.rgb[c]); + } + return new(tree.Color)(result, this.alpha + other.alpha); + }, + + toHSL: function () { + var r = this.rgb[0] / 255, + g = this.rgb[1] / 255, + b = this.rgb[2] / 255, + a = this.alpha; + + var max = Math.max(r, g, b), min = Math.min(r, g, b); + var h, s, l = (max + min) / 2, d = max - min; + + if (max === min) { + h = s = 0; + } else { + s = l > 0.5 ? d / (2 - max - min) : d / (max + min); + + switch (max) { + case r: h = (g - b) / d + (g < b ? 6 : 0); break; + case g: h = (b - r) / d + 2; break; + case b: h = (r - g) / d + 4; break; + } + h /= 6; + } + return { h: h * 360, s: s, l: l, a: a }; + } +}; + + +})(require('less/tree')); +(function (tree) { + +tree.Comment = function (value, silent) { + this.value = value; + this.silent = !!silent; +}; +tree.Comment.prototype = { + toCSS: function (env) { + return env.compress ? '' : this.value; + }, + eval: function () { return this } +}; + +})(require('less/tree')); +(function (tree) { + +// +// A number with a unit +// +tree.Dimension = function (value, unit) { + this.value = parseFloat(value); + this.unit = unit || null; +}; + +tree.Dimension.prototype = { + eval: function () { return this }, + toColor: function () { + return new(tree.Color)([this.value, this.value, this.value]); + }, + toCSS: function () { + var css = this.value + this.unit; + return css; + }, + + // In an operation between two Dimensions, + // we default to the first Dimension's unit, + // so `1px + 2em` will yield `3px`. 
+ // In the future, we could implement some unit + // conversions such that `100cm + 10mm` would yield + // `101cm`. + operate: function (op, other) { + return new(tree.Dimension) + (tree.operate(op, this.value, other.value), + this.unit || other.unit); + } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Directive = function (name, value) { + this.name = name; + if (Array.isArray(value)) { + this.ruleset = new(tree.Ruleset)([], value); + } else { + this.value = value; + } +}; +tree.Directive.prototype = { + toCSS: function (ctx, env) { + if (this.ruleset) { + this.ruleset.root = true; + return this.name + (env.compress ? '{' : ' {\n ') + + this.ruleset.toCSS(ctx, env).trim().replace(/\n/g, '\n ') + + (env.compress ? '}': '\n}\n'); + } else { + return this.name + ' ' + this.value.toCSS() + ';\n'; + } + }, + eval: function (env) { + env.frames.unshift(this); + this.ruleset = this.ruleset && this.ruleset.eval(env); + env.frames.shift(); + return this; + }, + variable: function (name) { return tree.Ruleset.prototype.variable.call(this.ruleset, name) }, + find: function () { return tree.Ruleset.prototype.find.apply(this.ruleset, arguments) }, + rulesets: function () { return tree.Ruleset.prototype.rulesets.apply(this.ruleset) } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Element = function (combinator, value) { + this.combinator = combinator instanceof tree.Combinator ? + combinator : new(tree.Combinator)(combinator); + this.value = value.trim(); +}; +tree.Element.prototype.toCSS = function (env) { + return this.combinator.toCSS(env || {}) + this.value; +}; + +tree.Combinator = function (value) { + if (value === ' ') { + this.value = ' '; + } else { + this.value = value ? value.trim() : ""; + } +}; +tree.Combinator.prototype.toCSS = function (env) { + return { + '' : '', + ' ' : ' ', + '&' : '', + ':' : ' :', + '::': '::', + '+' : env.compress ? '+' : ' + ', + '~' : env.compress ? '~' : ' ~ ', + '>' : env.compress ? 
'>' : ' > ' + }[this.value]; +}; + +})(require('less/tree')); +(function (tree) { + +tree.Expression = function (value) { this.value = value }; +tree.Expression.prototype = { + eval: function (env) { + if (this.value.length > 1) { + return new(tree.Expression)(this.value.map(function (e) { + return e.eval(env); + })); + } else if (this.value.length === 1) { + return this.value[0].eval(env); + } else { + return this; + } + }, + toCSS: function (env) { + return this.value.map(function (e) { + return e.toCSS(env); + }).join(' '); + } +}; + +})(require('less/tree')); +(function (tree) { +// +// CSS @import node +// +// The general strategy here is that we don't want to wait +// for the parsing to be completed, before we start importing +// the file. That's because in the context of a browser, +// most of the time will be spent waiting for the server to respond. +// +// On creation, we push the import path to our import queue, though +// `import,push`, we also pass it a callback, which it'll call once +// the file has been fetched, and parsed. +// +tree.Import = function (path, imports) { + var that = this; + + this._path = path; + + // The '.less' extension is optional + if (path instanceof tree.Quoted) { + this.path = /\.(le?|c)ss$/.test(path.value) ? path.value : path.value + '.less'; + } else { + this.path = path.value.value || path.value; + } + + this.css = /css$/.test(this.path); + + // Only pre-compile .less files + if (! this.css) { + imports.push(this.path, function (root) { + if (! root) { + throw new(Error)("Error parsing " + that.path); + } + that.root = root; + }); + } +}; + +// +// The actual import node doesn't return anything, when converted to CSS. +// The reason is that it's used at the evaluation stage, so that the rules +// it imports can be treated like any other rules. +// +// In `eval`, we make sure all Import nodes get evaluated, recursively, so +// we end up with a flat structure, which can easily be imported in the parent +// ruleset. 
+// +tree.Import.prototype = { + toCSS: function () { + if (this.css) { + return "@import " + this._path.toCSS() + ';\n'; + } else { + return ""; + } + }, + eval: function (env) { + var ruleset; + + if (this.css) { + return this; + } else { + ruleset = new(tree.Ruleset)(null, this.root.rules.slice(0)); + + for (var i = 0; i < ruleset.rules.length; i++) { + if (ruleset.rules[i] instanceof tree.Import) { + Array.prototype + .splice + .apply(ruleset.rules, + [i, 1].concat(ruleset.rules[i].eval(env))); + } + } + return ruleset.rules; + } + } +}; + +})(require('less/tree')); +(function (tree) { + +tree.JavaScript = function (string, index, escaped) { + this.escaped = escaped; + this.expression = string; + this.index = index; +}; +tree.JavaScript.prototype = { + eval: function (env) { + var result, + that = this, + context = {}; + + var expression = this.expression.replace(/@\{([\w-]+)\}/g, function (_, name) { + return tree.jsify(new(tree.Variable)('@' + name, that.index).eval(env)); + }); + + try { + expression = new(Function)('return (' + expression + ')'); + } catch (e) { + throw { message: "JavaScript evaluation error: `" + expression + "`" , + index: this.index }; + } + + for (var k in env.frames[0].variables()) { + context[k.slice(1)] = { + value: env.frames[0].variables()[k].value, + toJS: function () { + return this.value.eval(env).toCSS(); + } + }; + } + + try { + result = expression.call(context); + } catch (e) { + throw { message: "JavaScript evaluation error: '" + e.name + ': ' + e.message + "'" , + index: this.index }; + } + if (typeof(result) === 'string') { + return new(tree.Quoted)('"' + result + '"', result, this.escaped, this.index); + } else if (Array.isArray(result)) { + return new(tree.Anonymous)(result.join(', ')); + } else { + return new(tree.Anonymous)(result); + } + } +}; + +})(require('less/tree')); + +(function (tree) { + +tree.Keyword = function (value) { this.value = value }; +tree.Keyword.prototype = { + eval: function () { return this }, + 
toCSS: function () { return this.value } +}; + +})(require('less/tree')); +(function (tree) { + +tree.mixin = {}; +tree.mixin.Call = function (elements, args, index) { + this.selector = new(tree.Selector)(elements); + this.arguments = args; + this.index = index; +}; +tree.mixin.Call.prototype = { + eval: function (env) { + var mixins, rules = [], match = false; + + for (var i = 0; i < env.frames.length; i++) { + if ((mixins = env.frames[i].find(this.selector)).length > 0) { + for (var m = 0; m < mixins.length; m++) { + if (mixins[m].match(this.arguments, env)) { + try { + Array.prototype.push.apply( + rules, mixins[m].eval(env, this.arguments).rules); + match = true; + } catch (e) { + throw { message: e.message, index: e.index, stack: e.stack, call: this.index }; + } + } + } + if (match) { + return rules; + } else { + throw { message: 'No matching definition was found for `' + + this.selector.toCSS().trim() + '(' + + this.arguments.map(function (a) { + return a.toCSS(); + }).join(', ') + ")`", + index: this.index }; + } + } + } + throw { message: this.selector.toCSS().trim() + " is undefined", + index: this.index }; + } +}; + +tree.mixin.Definition = function (name, params, rules) { + this.name = name; + this.selectors = [new(tree.Selector)([new(tree.Element)(null, name)])]; + this.params = params; + this.arity = params.length; + this.rules = rules; + this._lookups = {}; + this.required = params.reduce(function (count, p) { + if (!p.name || (p.name && !p.value)) { return count + 1 } + else { return count } + }, 0); + this.parent = tree.Ruleset.prototype; + this.frames = []; +}; +tree.mixin.Definition.prototype = { + toCSS: function () { return "" }, + variable: function (name) { return this.parent.variable.call(this, name) }, + variables: function () { return this.parent.variables.call(this) }, + find: function () { return this.parent.find.apply(this, arguments) }, + rulesets: function () { return this.parent.rulesets.apply(this) }, + + eval: function (env, args) { 
+ var frame = new(tree.Ruleset)(null, []), context, _arguments = []; + + for (var i = 0, val; i < this.params.length; i++) { + if (this.params[i].name) { + if (val = (args && args[i]) || this.params[i].value) { + frame.rules.unshift(new(tree.Rule)(this.params[i].name, val.eval(env))); + } else { + throw { message: "wrong number of arguments for " + this.name + + ' (' + args.length + ' for ' + this.arity + ')' }; + } + } + } + for (var i = 0; i < Math.max(this.params.length, args && args.length); i++) { + _arguments.push(args[i] || this.params[i].value); + } + frame.rules.unshift(new(tree.Rule)('@arguments', new(tree.Expression)(_arguments).eval(env))); + + return new(tree.Ruleset)(null, this.rules.slice(0)).eval({ + frames: [this, frame].concat(this.frames, env.frames) + }); + }, + match: function (args, env) { + var argsLength = (args && args.length) || 0, len; + + if (argsLength < this.required) { return false } + if ((this.required > 0) && (argsLength > this.params.length)) { return false } + + len = Math.min(argsLength, this.arity); + + for (var i = 0; i < len; i++) { + if (!this.params[i].name) { + if (args[i].eval(env).toCSS() != this.params[i].value.eval(env).toCSS()) { + return false; + } + } + } + return true; + } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Operation = function (op, operands) { + this.op = op.trim(); + this.operands = operands; +}; +tree.Operation.prototype.eval = function (env) { + var a = this.operands[0].eval(env), + b = this.operands[1].eval(env), + temp; + + if (a instanceof tree.Dimension && b instanceof tree.Color) { + if (this.op === '*' || this.op === '+') { + temp = b, b = a, a = temp; + } else { + throw { name: "OperationError", + message: "Can't substract or divide a color from a number" }; + } + } + return a.operate(this.op, b); +}; + +tree.operate = function (op, a, b) { + switch (op) { + case '+': return a + b; + case '-': return a - b; + case '*': return a * b; + case '/': return a / b; + } +}; + 
+})(require('less/tree')); +(function (tree) { + +tree.Quoted = function (str, content, escaped, i) { + this.escaped = escaped; + this.value = content || ''; + this.quote = str.charAt(0); + this.index = i; +}; +tree.Quoted.prototype = { + toCSS: function () { + if (this.escaped) { + return this.value; + } else { + return this.quote + this.value + this.quote; + } + }, + eval: function (env) { + var that = this; + this.value = this.value.replace(/`([^`]+)`/g, function (_, exp) { + return new(tree.JavaScript)(exp, that.index, true).eval(env).value; + }).replace(/@\{([\w-]+)\}/g, function (_, name) { + return new(tree.Variable)('@' + name, that.index).eval(env).value; + }); + return this; + } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Rule = function (name, value, important, index) { + this.name = name; + this.value = (value instanceof tree.Value) ? value : new(tree.Value)([value]); + this.important = important ? ' ' + important.trim() : ''; + this.index = index; + + if (name.charAt(0) === '@') { + this.variable = true; + } else { this.variable = false } +}; +tree.Rule.prototype.toCSS = function (env) { + if (this.variable) { return "" } + else { + return this.name + (env.compress ? 
':' : ': ') + + this.value.toCSS(env) + + this.important + ";"; + } +}; + +tree.Rule.prototype.eval = function (context) { + return new(tree.Rule)(this.name, this.value.eval(context), this.important, this.index); +}; + +tree.Shorthand = function (a, b) { + this.a = a; + this.b = b; +}; + +tree.Shorthand.prototype = { + toCSS: function (env) { + return this.a.toCSS(env) + "/" + this.b.toCSS(env); + }, + eval: function () { return this } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Ruleset = function (selectors, rules) { + this.selectors = selectors; + this.rules = rules; + this._lookups = {}; +}; +tree.Ruleset.prototype = { + eval: function (env) { + var ruleset = new(tree.Ruleset)(this.selectors, this.rules.slice(0)); + + ruleset.root = this.root; + + // push the current ruleset to the frames stack + env.frames.unshift(ruleset); + + // Evaluate imports + if (ruleset.root) { + for (var i = 0; i < ruleset.rules.length; i++) { + if (ruleset.rules[i] instanceof tree.Import) { + Array.prototype.splice + .apply(ruleset.rules, [i, 1].concat(ruleset.rules[i].eval(env))); + } + } + } + + // Store the frames around mixin definitions, + // so they can be evaluated like closures when the time comes. + for (var i = 0; i < ruleset.rules.length; i++) { + if (ruleset.rules[i] instanceof tree.mixin.Definition) { + ruleset.rules[i].frames = env.frames.slice(0); + } + } + + // Evaluate mixin calls. + for (var i = 0; i < ruleset.rules.length; i++) { + if (ruleset.rules[i] instanceof tree.mixin.Call) { + Array.prototype.splice + .apply(ruleset.rules, [i, 1].concat(ruleset.rules[i].eval(env))); + } + } + + // Evaluate everything else + for (var i = 0, rule; i < ruleset.rules.length; i++) { + rule = ruleset.rules[i]; + + if (! (rule instanceof tree.mixin.Definition)) { + ruleset.rules[i] = rule.eval ? 
rule.eval(env) : rule; + } + } + + // Pop the stack + env.frames.shift(); + + return ruleset; + }, + match: function (args) { + return !args || args.length === 0; + }, + variables: function () { + if (this._variables) { return this._variables } + else { + return this._variables = this.rules.reduce(function (hash, r) { + if (r instanceof tree.Rule && r.variable === true) { + hash[r.name] = r; + } + return hash; + }, {}); + } + }, + variable: function (name) { + return this.variables()[name]; + }, + rulesets: function () { + if (this._rulesets) { return this._rulesets } + else { + return this._rulesets = this.rules.filter(function (r) { + return (r instanceof tree.Ruleset) || (r instanceof tree.mixin.Definition); + }); + } + }, + find: function (selector, self) { + self = self || this; + var rules = [], rule, match, + key = selector.toCSS(); + + if (key in this._lookups) { return this._lookups[key] } + + this.rulesets().forEach(function (rule) { + if (rule !== self) { + for (var j = 0; j < rule.selectors.length; j++) { + if (match = selector.match(rule.selectors[j])) { + if (selector.elements.length > 1) { + Array.prototype.push.apply(rules, rule.find( + new(tree.Selector)(selector.elements.slice(1)), self)); + } else { + rules.push(rule); + } + break; + } + } + } + }); + return this._lookups[key] = rules; + }, + // + // Entry point for code generation + // + // `context` holds an array of arrays. + // + toCSS: function (context, env) { + var css = [], // The CSS output + rules = [], // node.Rule instances + rulesets = [], // node.Ruleset instances + paths = [], // Current selectors + selector, // The fully rendered selector + rule; + + if (! 
this.root) { + if (context.length === 0) { + paths = this.selectors.map(function (s) { return [s] }); + } else { + for (var s = 0; s < this.selectors.length; s++) { + for (var c = 0; c < context.length; c++) { + paths.push(context[c].concat([this.selectors[s]])); + } + } + } + } + + // Compile rules and rulesets + for (var i = 0; i < this.rules.length; i++) { + rule = this.rules[i]; + + if (rule.rules || (rule instanceof tree.Directive)) { + rulesets.push(rule.toCSS(paths, env)); + } else if (rule instanceof tree.Comment) { + if (!rule.silent) { + if (this.root) { + rulesets.push(rule.toCSS(env)); + } else { + rules.push(rule.toCSS(env)); + } + } + } else { + if (rule.toCSS && !rule.variable) { + rules.push(rule.toCSS(env)); + } else if (rule.value && !rule.variable) { + rules.push(rule.value.toString()); + } + } + } + + rulesets = rulesets.join(''); + + // If this is the root node, we don't render + // a selector, or {}. + // Otherwise, only output if this ruleset has rules. + if (this.root) { + css.push(rules.join(env.compress ? '' : '\n')); + } else { + if (rules.length > 0) { + selector = paths.map(function (p) { + return p.map(function (s) { + return s.toCSS(env); + }).join('').trim(); + }).join(env.compress ? ',' : (paths.length > 3 ? ',\n' : ', ')); + css.push(selector, + (env.compress ? '{' : ' {\n ') + + rules.join(env.compress ? '' : '\n ') + + (env.compress ? '}' : '\n}\n')); + } + } + css.push(rulesets); + + return css.join('') + (env.compress ? 
'\n' : ''); + } +}; +})(require('less/tree')); +(function (tree) { + +tree.Selector = function (elements) { + this.elements = elements; + if (this.elements[0].combinator.value === "") { + this.elements[0].combinator.value = ' '; + } +}; +tree.Selector.prototype.match = function (other) { + if (this.elements[0].value === other.elements[0].value) { + return true; + } else { + return false; + } +}; +tree.Selector.prototype.toCSS = function (env) { + if (this._css) { return this._css } + + return this._css = this.elements.map(function (e) { + if (typeof(e) === 'string') { + return ' ' + e.trim(); + } else { + return e.toCSS(env); + } + }).join(''); +}; + +})(require('less/tree')); +(function (tree) { + +tree.URL = function (val, paths) { + if (val.data) { + this.attrs = val; + } else { + // Add the base path if the URL is relative and we are in the browser + if (!/^(?:https?:\/|file:\/|data:\/)?\//.test(val.value) && paths.length > 0 && typeof(window) !== 'undefined') { + val.value = paths[0] + (val.value.charAt(0) === '/' ? val.value.slice(1) : val.value); + } + this.value = val; + this.paths = paths; + } +}; +tree.URL.prototype = { + toCSS: function () { + return "url(" + (this.attrs ? 'data:' + this.attrs.mime + this.attrs.charset + this.attrs.base64 + this.attrs.data + : this.value.toCSS()) + ")"; + }, + eval: function (ctx) { + return this.attrs ? this : new(tree.URL)(this.value.eval(ctx), this.paths); + } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Value = function (value) { + this.value = value; + this.is = 'value'; +}; +tree.Value.prototype = { + eval: function (env) { + if (this.value.length === 1) { + return this.value[0].eval(env); + } else { + return new(tree.Value)(this.value.map(function (v) { + return v.eval(env); + })); + } + }, + toCSS: function (env) { + return this.value.map(function (e) { + return e.toCSS(env); + }).join(env.compress ? 
',' : ', '); + } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Variable = function (name, index) { this.name = name, this.index = index }; +tree.Variable.prototype = { + eval: function (env) { + var variable, v, name = this.name; + + if (name.indexOf('@@') == 0) { + name = '@' + new(tree.Variable)(name.slice(1)).eval(env).value; + } + + if (variable = tree.find(env.frames, function (frame) { + if (v = frame.variable(name)) { + return v.value.eval(env); + } + })) { return variable } + else { + throw { message: "variable " + name + " is undefined", + index: this.index }; + } + } +}; + +})(require('less/tree')); +require('less/tree').find = function (obj, fun) { + for (var i = 0, r; i < obj.length; i++) { + if (r = fun.call(obj, obj[i])) { return r } + } + return null; +}; +require('less/tree').jsify = function (obj) { + if (Array.isArray(obj.value) && (obj.value.length > 1)) { + return '[' + obj.value.map(function (v) { return v.toCSS(false) }).join(', ') + ']'; + } else { + return obj.toCSS(false); + } +}; +// +// browser.js - client-side engine +// + +var isFileProtocol = (location.protocol === 'file:' || + location.protocol === 'chrome:' || + location.protocol === 'chrome-extension:' || + location.protocol === 'resource:'); + +less.env = less.env || (location.hostname == '127.0.0.1' || + location.hostname == '0.0.0.0' || + location.hostname == 'localhost' || + location.port.length > 0 || + isFileProtocol ? 'development' + : 'production'); + +// Load styles asynchronously (default: false) +// +// This is set to `false` by default, so that the body +// doesn't start loading before the stylesheets are parsed. +// Setting this to `true` can result in flickering. +// +less.async = false; + +// Interval between watch polls +less.poll = less.poll || (isFileProtocol ? 
1000 : 1500); + +// +// Watch mode +// +less.watch = function () { return this.watchMode = true }; +less.unwatch = function () { return this.watchMode = false }; + +if (less.env === 'development') { + less.optimization = 0; + + if (/!watch/.test(location.hash)) { + less.watch(); + } + less.watchTimer = setInterval(function () { + if (less.watchMode) { + loadStyleSheets(function (root, sheet, env) { + if (root) { + createCSS(root.toCSS(), sheet, env.lastModified); + } + }); + } + }, less.poll); +} else { + less.optimization = 3; +} + +var cache; + +try { + cache = (typeof(window.localStorage) === 'undefined') ? null : window.localStorage; +} catch (_) { + cache = null; +} + +// +// Get all tags with the 'rel' attribute set to "stylesheet/less" +// +var links = document.getElementsByTagName('link'); +var typePattern = /^text\/(x-)?less$/; + +less.sheets = []; + +for (var i = 0; i < links.length; i++) { + if (links[i].rel === 'stylesheet/less' || (links[i].rel.match(/stylesheet/) && + (links[i].type.match(typePattern)))) { + less.sheets.push(links[i]); + } +} + + +less.refresh = function (reload) { + var startTime, endTime; + startTime = endTime = new(Date); + + loadStyleSheets(function (root, sheet, env) { + if (env.local) { + log("loading " + sheet.href + " from cache."); + } else { + log("parsed " + sheet.href + " successfully."); + createCSS(root.toCSS(), sheet, env.lastModified); + } + log("css for " + sheet.href + " generated in " + (new(Date) - endTime) + 'ms'); + (env.remaining === 0) && log("css generated in " + (new(Date) - startTime) + 'ms'); + endTime = new(Date); + }, reload); + + loadStyles(); +}; +less.refreshStyles = loadStyles; + +less.refresh(less.env === 'development'); + +function loadStyles() { + var styles = document.getElementsByTagName('style'); + for (var i = 0; i < styles.length; i++) { + if (styles[i].type.match(typePattern)) { + new(less.Parser)().parse(styles[i].innerHTML || '', function (e, tree) { + styles[i].type = 'text/css'; + 
styles[i].innerHTML = tree.toCSS(); + }); + } + } +} + +function loadStyleSheets(callback, reload) { + for (var i = 0; i < less.sheets.length; i++) { + loadStyleSheet(less.sheets[i], callback, reload, less.sheets.length - (i + 1)); + } +} + +function loadStyleSheet(sheet, callback, reload, remaining) { + var url = window.location.href.replace(/[#?].*$/, ''); + var href = sheet.href.replace(/\?.*$/, ''); + var css = cache && cache.getItem(href); + var timestamp = cache && cache.getItem(href + ':timestamp'); + var styles = { css: css, timestamp: timestamp }; + + // Stylesheets in IE don't always return the full path + if (! /^(https?|file):/.test(href)) { + if (href.charAt(0) == "/") { + href = window.location.protocol + "//" + window.location.host + href; + } else { + href = url.slice(0, url.lastIndexOf('/') + 1) + href; + } + } + + xhr(sheet.href, sheet.type, function (data, lastModified) { + if (!reload && styles && lastModified && + (new(Date)(lastModified).valueOf() === + new(Date)(styles.timestamp).valueOf())) { + // Use local copy + createCSS(styles.css, sheet); + callback(null, sheet, { local: true, remaining: remaining }); + } else { + // Use remote copy (re-parse) + try { + new(less.Parser)({ + optimization: less.optimization, + paths: [href.replace(/[\w\.-]+$/, '')], + mime: sheet.type + }).parse(data, function (e, root) { + if (e) { return error(e, href) } + try { + callback(root, sheet, { local: false, lastModified: lastModified, remaining: remaining }); + removeNode(document.getElementById('less-error-message:' + extractId(href))); + } catch (e) { + error(e, href); + } + }); + } catch (e) { + error(e, href); + } + } + }, function (status, url) { + throw new(Error)("Couldn't load " + url + " (" + status + ")"); + }); +} + +function extractId(href) { + return href.replace(/^[a-z]+:\/\/?[^\/]+/, '' ) // Remove protocol & domain + .replace(/^\//, '' ) // Remove root / + .replace(/\?.*$/, '' ) // Remove query + .replace(/\.[^\.\/]+$/, '' ) // Remove file 
extension + .replace(/[^\.\w-]+/g, '-') // Replace illegal characters + .replace(/\./g, ':'); // Replace dots with colons(for valid id) +} + +function createCSS(styles, sheet, lastModified) { + var css; + + // Strip the query-string + var href = sheet.href ? sheet.href.replace(/\?.*$/, '') : ''; + + // If there is no title set, use the filename, minus the extension + var id = 'less:' + (sheet.title || extractId(href)); + + // If the stylesheet doesn't exist, create a new node + if ((css = document.getElementById(id)) === null) { + css = document.createElement('style'); + css.type = 'text/css'; + css.media = sheet.media || 'screen'; + css.id = id; + document.getElementsByTagName('head')[0].appendChild(css); + } + + if (css.styleSheet) { // IE + try { + css.styleSheet.cssText = styles; + } catch (e) { + throw new(Error)("Couldn't reassign styleSheet.cssText."); + } + } else { + (function (node) { + if (css.childNodes.length > 0) { + if (css.firstChild.nodeValue !== node.nodeValue) { + css.replaceChild(node, css.firstChild); + } + } else { + css.appendChild(node); + } + })(document.createTextNode(styles)); + } + + // Don't update the local store if the file wasn't modified + if (lastModified && cache) { + log('saving ' + href + ' to cache.'); + cache.setItem(href, styles); + cache.setItem(href + ':timestamp', lastModified); + } +} + +function xhr(url, type, callback, errback) { + var xhr = getXMLHttpRequest(); + var async = isFileProtocol ? 
false : less.async; + + if (typeof(xhr.overrideMimeType) === 'function') { + xhr.overrideMimeType('text/css'); + } + xhr.open('GET', url, async); + xhr.setRequestHeader('Accept', type || 'text/x-less, text/css; q=0.9, */*; q=0.5'); + xhr.send(null); + + if (isFileProtocol) { + if (xhr.status === 0) { + callback(xhr.responseText); + } else { + errback(xhr.status, url); + } + } else if (async) { + xhr.onreadystatechange = function () { + if (xhr.readyState == 4) { + handleResponse(xhr, callback, errback); + } + }; + } else { + handleResponse(xhr, callback, errback); + } + + function handleResponse(xhr, callback, errback) { + if (xhr.status >= 200 && xhr.status < 300) { + callback(xhr.responseText, + xhr.getResponseHeader("Last-Modified")); + } else if (typeof(errback) === 'function') { + errback(xhr.status, url); + } + } +} + +function getXMLHttpRequest() { + if (window.XMLHttpRequest) { + return new(XMLHttpRequest); + } else { + try { + return new(ActiveXObject)("MSXML2.XMLHTTP.3.0"); + } catch (e) { + log("browser doesn't support AJAX."); + return null; + } + } +} + +function removeNode(node) { + return node && node.parentNode.removeChild(node); +} + +function log(str) { + if (less.env == 'development' && typeof(console) !== "undefined") { console.log('less: ' + str) } +} + +function error(e, href) { + var id = 'less-error-message:' + extractId(href); + + var template = ['
      ', + '
    • {0}
    • ', + '
    • {current}
    • ', + '
    • {2}
    • ', + '
    '].join('\n'); + + var elem = document.createElement('div'), timer, content; + + elem.id = id; + elem.className = "less-error-message"; + + content = '

    ' + (e.message || 'There is an error in your .less file') + + '

    ' + '

    ' + href + " "; + + if (e.extract) { + content += 'on line ' + e.line + ', column ' + (e.column + 1) + ':

    ' + + template.replace(/\[(-?\d)\]/g, function (_, i) { + return (parseInt(e.line) + parseInt(i)) || ''; + }).replace(/\{(\d)\}/g, function (_, i) { + return e.extract[parseInt(i)] || ''; + }).replace(/\{current\}/, e.extract[1].slice(0, e.column) + '' + + e.extract[1].slice(e.column) + ''); + } + elem.innerHTML = content; + + // CSS for error messages + createCSS([ + '.less-error-message ul, .less-error-message li {', + 'list-style-type: none;', + 'margin-right: 15px;', + 'padding: 4px 0;', + 'margin: 0;', + '}', + '.less-error-message label {', + 'font-size: 12px;', + 'margin-right: 15px;', + 'padding: 4px 0;', + 'color: #cc7777;', + '}', + '.less-error-message pre {', + 'color: #ee4444;', + 'padding: 4px 0;', + 'margin: 0;', + 'display: inline-block;', + '}', + '.less-error-message pre.ctx {', + 'color: #dd4444;', + '}', + '.less-error-message h3 {', + 'font-size: 20px;', + 'font-weight: bold;', + 'padding: 15px 0 5px 0;', + 'margin: 0;', + '}', + '.less-error-message a {', + 'color: #10a', + '}', + '.less-error-message .error {', + 'color: red;', + 'font-weight: bold;', + 'padding-bottom: 2px;', + 'border-bottom: 1px dashed red;', + '}' + ].join('\n'), { title: 'error-message' }); + + elem.style.cssText = [ + "font-family: Arial, sans-serif", + "border: 1px solid #e00", + "background-color: #eee", + "border-radius: 5px", + "-webkit-border-radius: 5px", + "-moz-border-radius: 5px", + "color: #e00", + "padding: 15px", + "margin-bottom: 15px" + ].join(';'); + + if (less.env == 'development') { + timer = setInterval(function () { + if (document.body) { + if (document.getElementById(id)) { + document.body.replaceChild(elem, document.getElementById(id)); + } else { + document.body.insertBefore(elem, document.body.firstChild); + } + clearInterval(timer); + } + }, 10); + } +} + +})(window); diff --git a/src/dashboard/src/media/vendor/less.js/dist/less-1.1.1.min.js b/src/dashboard/src/media/vendor/less.js/dist/less-1.1.1.min.js new file mode 100644 index 
0000000000..c204123ec7 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/dist/less-1.1.1.min.js @@ -0,0 +1,16 @@ +// +// LESS - Leaner CSS v1.1.1 +// http://lesscss.org +// +// Copyright (c) 2009-2011, Alexis Sellier +// Licensed under the Apache 2.0 License. +// +// +// LESS - Leaner CSS v1.1.1 +// http://lesscss.org +// +// Copyright (c) 2009-2011, Alexis Sellier +// Licensed under the Apache 2.0 License. +// +(function(a,b){function v(a,b){var c="less-error-message:"+p(b),e=["
      ",'
    • {0}
    • ',"
    • {current}
    • ",'
    • {2}
    • ',"
    "].join("\n"),f=document.createElement("div"),g,h;f.id=c,f.className="less-error-message",h="

    "+(a.message||"There is an error in your .less file")+"

    "+'

    '+b+" ",a.extract&&(h+="on line "+a.line+", column "+(a.column+1)+":

    "+e.replace(/\[(-?\d)\]/g,function(b,c){return parseInt(a.line)+parseInt(c)||""}).replace(/\{(\d)\}/g,function(b,c){return a.extract[parseInt(c)]||""}).replace(/\{current\}/,a.extract[1].slice(0,a.column)+''+a.extract[1].slice(a.column)+"")),f.innerHTML=h,q([".less-error-message ul, .less-error-message li {","list-style-type: none;","margin-right: 15px;","padding: 4px 0;","margin: 0;","}",".less-error-message label {","font-size: 12px;","margin-right: 15px;","padding: 4px 0;","color: #cc7777;","}",".less-error-message pre {","color: #ee4444;","padding: 4px 0;","margin: 0;","display: inline-block;","}",".less-error-message pre.ctx {","color: #dd4444;","}",".less-error-message h3 {","font-size: 20px;","font-weight: bold;","padding: 15px 0 5px 0;","margin: 0;","}",".less-error-message a {","color: #10a","}",".less-error-message .error {","color: red;","font-weight: bold;","padding-bottom: 2px;","border-bottom: 1px dashed red;","}"].join("\n"),{title:"error-message"}),f.style.cssText=["font-family: Arial, sans-serif","border: 1px solid #e00","background-color: #eee","border-radius: 5px","-webkit-border-radius: 5px","-moz-border-radius: 5px","color: #e00","padding: 15px","margin-bottom: 15px"].join(";"),d.env=="development"&&(g=setInterval(function(){document.body&&(document.getElementById(c)?document.body.replaceChild(f,document.getElementById(c)):document.body.insertBefore(f,document.body.firstChild),clearInterval(g))},10))}function u(a){d.env=="development"&&typeof console!="undefined"&&console.log("less: "+a)}function t(a){return a&&a.parentNode.removeChild(a)}function s(){if(a.XMLHttpRequest)return new XMLHttpRequest;try{return new ActiveXObject("MSXML2.XMLHTTP.3.0")}catch(b){u("browser doesn't support AJAX.");return null}}function r(a,b,c,e){function i(b,c,d){b.status>=200&&b.status<300?c(b.responseText,b.getResponseHeader("Last-Modified")):typeof d=="function"&&d(b.status,a)}var f=s(),h=g?!1:d.async;typeof 
f.overrideMimeType=="function"&&f.overrideMimeType("text/css"),f.open("GET",a,h),f.setRequestHeader("Accept",b||"text/x-less, text/css; q=0.9, */*; q=0.5"),f.send(null),g?f.status===0?c(f.responseText):e(f.status,a):h?f.onreadystatechange=function(){f.readyState==4&&i(f,c,e)}:i(f,c,e)}function q(a,b,c){var d,e=b.href?b.href.replace(/\?.*$/,""):"",f="less:"+(b.title||p(e));(d=document.getElementById(f))===null&&(d=document.createElement("style"),d.type="text/css",d.media=b.media||"screen",d.id=f,document.getElementsByTagName("head")[0].appendChild(d));if(d.styleSheet)try{d.styleSheet.cssText=a}catch(g){throw new Error("Couldn't reassign styleSheet.cssText.")}else(function(a){d.childNodes.length>0?d.firstChild.nodeValue!==a.nodeValue&&d.replaceChild(a,d.firstChild):d.appendChild(a)})(document.createTextNode(a));c&&h&&(u("saving "+e+" to cache."),h.setItem(e,a),h.setItem(e+":timestamp",c))}function p(a){return a.replace(/^[a-z]+:\/\/?[^\/]+/,"").replace(/^\//,"").replace(/\?.*$/,"").replace(/\.[^\.\/]+$/,"").replace(/[^\.\w-]+/g,"-").replace(/\./g,":")}function o(b,c,e,f){var g=a.location.href.replace(/[#?].*$/,""),i=b.href.replace(/\?.*$/,""),j=h&&h.getItem(i),k=h&&h.getItem(i+":timestamp"),l={css:j,timestamp:k};/^(https?|file):/.test(i)||(i.charAt(0)=="/"?i=a.location.protocol+"//"+a.location.host+i:i=g.slice(0,g.lastIndexOf("/")+1)+i),r(b.href,b.type,function(a,g){if(!e&&l&&g&&(new Date(g)).valueOf()===(new Date(l.timestamp)).valueOf())q(l.css,b),c(null,b,{local:!0,remaining:f});else try{(new d.Parser({optimization:d.optimization,paths:[i.replace(/[\w\.-]+$/,"")],mime:b.type})).parse(a,function(a,d){if(a)return v(a,i);try{c(d,b,{local:!1,lastModified:g,remaining:f}),t(document.getElementById("less-error-message:"+p(i)))}catch(a){v(a,i)}})}catch(h){v(h,i)}},function(a,b){throw new Error("Couldn't load "+b+" ("+a+")")})}function n(a,b){for(var c=0;c>>0;for(var d=0;d>>0,c=Array(b),d=arguments[1];for(var e=0;e>>0,c=0;if(b===0&&arguments.length===1)throw new 
TypeError;if(arguments.length>=2)var d=arguments[1];else for(;;){if(c in this){d=this[c++];break}if(++c>=b)throw new TypeError}for(;c=b)return-1;c<0&&(c+=b);for(;ck&&(j[f]=j[f].slice(c-k),k=c)}function q(){j[f]=g,c=h,k=c}function p(){g=j[f],h=c,k=c}var b,c,f,g,h,i,j,k,l,m=this,n=function(){},o=this.imports={paths:a&&a.paths||[],queue:[],files:{},mime:a&&a.mime,push:function(b,c){var e=this;this.queue.push(b),d.Parser.importer(b,this.paths,function(a){e.queue.splice(e.queue.indexOf(b),1),e.files[b]=a,c(a),e.queue.length===0&&n()},a)}};this.env=a=a||{},this.optimization="optimization"in this.env?this.env.optimization:1,this.env.filename=this.env.filename||null;return l={imports:o,parse:function(d,g){var h,l,m,o,p,q,r=[],t,u=null;c=f=k=i=0,j=[],b=d.replace(/\r\n/g,"\n"),j=function(c){var d=0,e=/[^"'`\{\}\/\(\)]+/g,f=/\/\*(?:[^*]|\*+[^\/*])*\*+\/|\/\/.*/g,g=0,h,i=c[0],j,k;for(var l=0,m,n;l0)throw{type:"Syntax",message:"Missing closing `}`",filename:a.filename};return c.map(function(a){return a.join("")})}([[]]),h=new e.Ruleset([],s(this.parsers.primary)),h.root=!0,h.toCSS=function(c){var d,f,g;return function(g,h){function n(a){return a?(b.slice(0,a).match(/\n/g)||"").length:null}var i=[];g=g||{},typeof h=="object"&&!Array.isArray(h)&&(h=Object.keys(h).map(function(a){var b=h[a];b instanceof e.Value||(b instanceof e.Expression||(b=new e.Expression([b])),b=new e.Value([b]));return new e.Rule("@"+a,b,!1,0)}),i=[new e.Ruleset(null,h)]);try{var j=c.call(this,{frames:i}).toCSS([],{compress:g.compress||!1})}catch(k){f=b.split("\n"),d=n(k.index);for(var l=k.index,m=-1;l>=0&&b.charAt(l)!=="\n";l--)m++;throw{type:k.type,message:k.message,filename:a.filename,index:k.index,line:typeof d=="number"?d+1:null,callLine:k.call&&n(k.call)+1,callExtract:f[n(k.call)],stack:k.stack,column:m,extract:[f[d-1],f[d],f[d+1]]}}return g.compress?j.replace(/(\s)+/g,"$1"):j}}(h.eval);if(c=0&&b.charAt(v)!=="\n";v--)w++;u={name:"ParseError",message:"Syntax Error on line 
"+p,index:c,filename:a.filename,line:p,column:w,extract:[q[p-2],q[p-1],q[p]]}}this.imports.queue.length>0?n=function(){g(u,h)}:g(u,h)},parsers:{primary:function(){var a,b=[];while((a=s(this.mixin.definition)||s(this.rule)||s(this.ruleset)||s(this.mixin.call)||s(this.comment)||s(this.directive))||s(/^[\s\n]+/))a&&b.push(a);return b},comment:function(){var a;if(b.charAt(c)==="/"){if(b.charAt(c+1)==="/")return new e.Comment(s(/^\/\/.*/),!0);if(a=s(/^\/\*(?:[^*]|\*+[^\/*])*\*+\/\n?/))return new e.Comment(a)}},entities:{quoted:function(){var a,d=c,f;b.charAt(d)==="~"&&(d++,f=!0);if(b.charAt(d)==='"'||b.charAt(d)==="'"){f&&s("~");if(a=s(/^"((?:[^"\\\r\n]|\\.)*)"|'((?:[^'\\\r\n]|\\.)*)'/))return new e.Quoted(a[0],a[1]||a[2],f)}},keyword:function(){var a;if(a=s(/^[A-Za-z-]+/))return new e.Keyword(a)},call:function(){var a,b;if(!!(a=/^([\w-]+|%)\(/.exec(j[f]))){a=a[1].toLowerCase();if(a==="url")return null;c+=a.length;if(a==="alpha")return s(this.alpha);s("("),b=s(this.entities.arguments);if(!s(")"))return;if(a)return new e.Call(a,b)}},arguments:function(){var a=[],b;while(b=s(this.expression)){a.push(b);if(!s(","))break}return a},literal:function(){return s(this.entities.dimension)||s(this.entities.color)||s(this.entities.quoted)},url:function(){var a;if(b.charAt(c)==="u"&&!!s(/^url\(/)){a=s(this.entities.quoted)||s(this.entities.variable)||s(this.entities.dataURI)||s(/^[-\w%@$\/.&=:;#+?~]+/)||"";if(!s(")"))throw new Error("missing closing ) for url()");return new e.URL(a.value||a.data||a instanceof e.Variable?a:new e.Anonymous(a),o.paths)}},dataURI:function(){var a;if(s(/^data:/)){a={},a.mime=s(/^[^\/]+\/[^,;)]+/)||"",a.charset=s(/^;\s*charset=[^,;)]+/)||"",a.base64=s(/^;\s*base64/)||"",a.data=s(/^,\s*[^)]+/);if(a.data)return a}},variable:function(){var a,d=c;if(b.charAt(c)==="@"&&(a=s(/^@@?[\w-]+/)))return new e.Variable(a,d)},color:function(){var a;if(b.charAt(c)==="#"&&(a=s(/^#([a-fA-F0-9]{6}|[a-fA-F0-9]{3})/)))return new e.Color(a[1])},dimension:function(){var 
a,d=b.charCodeAt(c);if(!(d>57||d<45||d===47))if(a=s(/^(-?\d*\.?\d+)(px|%|em|pc|ex|in|deg|s|ms|pt|cm|mm|rad|grad|turn)?/))return new e.Dimension(a[1],a[2])},javascript:function(){var a,d=c,f;b.charAt(d)==="~"&&(d++,f=!0);if(b.charAt(d)==="`"){f&&s("~");if(a=s(/^`([^`]*)`/))return new e.JavaScript(a[1],c,f)}}},variable:function(){var a;if(b.charAt(c)==="@"&&(a=s(/^(@[\w-]+)\s*:/)))return a[1]},shorthand:function(){var a,b;if(!!t(/^[@\w.%-]+\/[@\w.-]+/)&&(a=s(this.entity))&&s("/")&&(b=s(this.entity)))return new e.Shorthand(a,b)},mixin:{call:function(){var a=[],d,f,g,h=c,i=b.charAt(c);if(i==="."||i==="#"){while(d=s(/^[#.](?:[\w-]|\\(?:[a-fA-F0-9]{1,6} ?|[^a-fA-F0-9]))+/))a.push(new e.Element(f,d)),f=s(">");s("(")&&(g=s(this.entities.arguments))&&s(")");if(a.length>0&&(s(";")||t("}")))return new e.mixin.Call(a,g,h)}},definition:function(){var a,d=[],f,g,h,i;if(!(b.charAt(c)!=="."&&b.charAt(c)!=="#"||t(/^[^{]*(;|})/)))if(f=s(/^([#.](?:[\w-]|\\(?:[a-fA-F0-9]{1,6} ?|[^a-fA-F0-9]))+)\s*\(/)){a=f[1];while(h=s(this.entities.variable)||s(this.entities.literal)||s(this.entities.keyword)){if(h instanceof e.Variable)if(s(":"))if(i=s(this.expression))d.push({name:h.name,value:i});else throw new Error("Expected value");else d.push({name:h.name});else d.push({value:h});if(!s(","))break}if(!s(")"))throw new Error("Expected )");g=s(this.block);if(g)return new e.mixin.Definition(a,d,g)}}},entity:function(){return s(this.entities.literal)||s(this.entities.variable)||s(this.entities.url)||s(this.entities.call)||s(this.entities.keyword)||s(this.entities.javascript)||s(this.comment)},end:function(){return s(";")||t("}")},alpha:function(){var a;if(!!s(/^opacity=/i))if(a=s(/^\d+/)||s(this.entities.variable)){if(!s(")"))throw new Error("missing closing ) for alpha()");return new e.Alpha(a)}},element:function(){var a,b,c;c=s(this.combinator),a=s(/^(?:[.#]?|:*)(?:[\w-]|\\(?:[a-fA-F0-9]{1,6} ?|[^a-fA-F0-9]))+/)||s("*")||s(this.attribute)||s(/^\([^)@]+\)/);if(a)return new 
e.Element(c,a)},combinator:function(){var a,d=b.charAt(c);if(d===">"||d==="&"||d==="+"||d==="~"){c++;while(b.charAt(c)===" ")c++;return new e.Combinator(d)}if(d===":"&&b.charAt(c+1)===":"){c+=2;while(b.charAt(c)===" ")c++;return new e.Combinator("::")}return b.charAt(c-1)===" "?new e.Combinator(" "):new e.Combinator(null)},selector:function(){var a,d,f=[],g,h;while(d=s(this.element)){g=b.charAt(c),f.push(d);if(g==="{"||g==="}"||g===";"||g===",")break}if(f.length>0)return new e.Selector(f)},tag:function(){return s(/^[a-zA-Z][a-zA-Z-]*[0-9]?/)||s("*")},attribute:function(){var a="",b,c,d;if(!!s("[")){if(b=s(/^[a-zA-Z-]+/)||s(this.entities.quoted))(d=s(/^[|~*$^]?=/))&&(c=s(this.entities.quoted)||s(/^[\w-]+/))?a=[b,d,c.toCSS?c.toCSS():c].join(""):a=b;if(!s("]"))return;if(a)return"["+a+"]"}},block:function(){var a;if(s("{")&&(a=s(this.primary))&&s("}"))return a},ruleset:function(){var a=[],b,d,g;p();if(g=/^([.#: \w-]+)[\s\n]*\{/.exec(j[f]))c+=g[0].length-1,a=[new e.Selector([new e.Element(null,g[1])])];else while(b=s(this.selector)){a.push(b),s(this.comment);if(!s(","))break;s(this.comment)}if(a.length>0&&(d=s(this.block)))return new e.Ruleset(a,d);i=c,q()},rule:function(){var a,d,g=b.charAt(c),k,l;p();if(g!=="."&&g!=="#"&&g!=="&")if(a=s(this.variable)||s(this.property)){a.charAt(0)!="@"&&(l=/^([^@+\/'"*`(;{}-]*);/.exec(j[f]))?(c+=l[0].length-1,d=new e.Anonymous(l[1])):a==="font"?d=s(this.font):d=s(this.value),k=s(this.important);if(d&&s(this.end))return new e.Rule(a,d,k,h);i=c,q()}},"import":function(){var a;if(s(/^@import\s+/)&&(a=s(this.entities.quoted)||s(this.entities.url))&&s(";"))return new e.Import(a,o)},directive:function(){var a,d,f,g;if(b.charAt(c)==="@"){if(d=s(this["import"]))return d;if(a=s(/^@media|@page|@-[-a-z]+/)){g=(s(/^[^{]+/)||"").trim();if(f=s(this.block))return new e.Directive(a+" "+g,f)}else if(a=s(/^@[-a-z]+/))if(a==="@font-face"){if(f=s(this.block))return new e.Directive(a,f)}else if((d=s(this.entity))&&s(";"))return new 
e.Directive(a,d)}},font:function(){var a=[],b=[],c,d,f,g;while(g=s(this.shorthand)||s(this.entity))b.push(g);a.push(new e.Expression(b));if(s(","))while(g=s(this.expression)){a.push(g);if(!s(","))break}return new e.Value(a)},value:function(){var a,b=[],c;while(a=s(this.expression)){b.push(a);if(!s(","))break}if(b.length>0)return new e.Value(b)},important:function(){if(b.charAt(c)==="!")return s(/^! *important/)},sub:function(){var a;if(s("(")&&(a=s(this.expression))&&s(")"))return a},multiplication:function(){var a,b,c,d;if(a=s(this.operand)){while((c=s("/")||s("*"))&&(b=s(this.operand)))d=new e.Operation(c,[d||a,b]);return d||a}},addition:function(){var a,d,f,g;if(a=s(this.multiplication)){while((f=s(/^[-+]\s+/)||b.charAt(c-1)!=" "&&(s("+")||s("-")))&&(d=s(this.multiplication)))g=new e.Operation(f,[g||a,d]);return g||a}},operand:function(){var a,d=b.charAt(c+1);b.charAt(c)==="-"&&(d==="@"||d==="(")&&(a=s("-"));var f=s(this.sub)||s(this.entities.dimension)||s(this.entities.color)||s(this.entities.variable)||s(this.entities.call);return a?new e.Operation("*",[new e.Dimension(-1),f]):f},expression:function(){var a,b,c=[],d;while(a=s(this.addition)||s(this.entity))c.push(a);if(c.length>0)return new e.Expression(c)},property:function(){var a;if(a=s(/^(\*?-?[-a-z_0-9]+)\s*:/))return a[1]}}}},typeof a!="undefined"&&(d.Parser.importer=function(a,b,c,d){a.charAt(0)!=="/"&&b.length>0&&(a=b[0]+a),o({href:a,title:a,type:d.mime},c,!0)}),function(a){function d(a){return Math.min(1,Math.max(0,a))}function c(b){if(b instanceof a.Dimension)return parseFloat(b.unit=="%"?b.value/100:b.value);if(typeof b=="number")return b;throw{error:"RuntimeError",message:"color functions take numbers as parameters"}}function b(b){return a.functions.hsla(b.h,b.s,b.l,b.a)}a.functions={rgb:function(a,b,c){return this.rgba(a,b,c,1)},rgba:function(b,d,e,f){var g=[b,d,e].map(function(a){return c(a)}),f=c(f);return new a.Color(g,f)},hsl:function(a,b,c){return 
this.hsla(a,b,c,1)},hsla:function(a,b,d,e){function h(a){a=a<0?a+1:a>1?a-1:a;return a*6<1?g+(f-g)*a*6:a*2<1?f:a*3<2?g+(f-g)*(2/3-a)*6:g}a=c(a)%360/360,b=c(b),d=c(d),e=c(e);var f=d<=.5?d*(b+1):d+b-d*b,g=d*2-f;return this.rgba(h(a+1/3)*255,h(a)*255,h(a-1/3)*255,e)},hue:function(b){return new a.Dimension(Math.round(b.toHSL().h))},saturation:function(b){return new a.Dimension(Math.round(b.toHSL().s*100),"%")},lightness:function(b){return new a.Dimension(Math.round(b.toHSL().l*100),"%")},alpha:function(b){return new a.Dimension(b.toHSL().a)},saturate:function(a,c){var e=a.toHSL();e.s+=c.value/100,e.s=d(e.s);return b(e)},desaturate:function(a,c){var e=a.toHSL();e.s-=c.value/100,e.s=d(e.s);return b(e)},lighten:function(a,c){var e=a.toHSL();e.l+=c.value/100,e.l=d(e.l);return b(e)},darken:function(a,c){var e=a.toHSL();e.l-=c.value/100,e.l=d(e.l);return b(e)},fadein:function(a,c){var e=a.toHSL();e.a+=c.value/100,e.a=d(e.a);return b(e)},fadeout:function(a,c){var e=a.toHSL();e.a-=c.value/100,e.a=d(e.a);return b(e)},spin:function(a,c){var d=a.toHSL(),e=(d.h+c.value)%360;d.h=e<0?360+e:e;return b(d)},mix:function(b,c,d){var e=d.value/100,f=e*2-1,g=b.toHSL().a-c.toHSL().a,h=((f*g==-1?f:(f+g)/(1+f*g))+1)/2,i=1-h,j=[b.rgb[0]*h+c.rgb[0]*i,b.rgb[1]*h+c.rgb[1]*i,b.rgb[2]*h+c.rgb[2]*i],k=b.alpha*e+c.alpha*(1-e);return new a.Color(j,k)},greyscale:function(b){return this.desaturate(b,new a.Dimension(100))},e:function(b){return new a.Anonymous(b instanceof a.JavaScript?b.evaluated:b)},escape:function(b){return new a.Anonymous(encodeURI(b.value).replace(/=/g,"%3D").replace(/:/g,"%3A").replace(/#/g,"%23").replace(/;/g,"%3B").replace(/\(/g,"%28").replace(/\)/g,"%29"))},"%":function(b){var c=Array.prototype.slice.call(arguments,1),d=b.value;for(var e=0;e255?255:a<0?0:a).toString(16);return a.length===1?"0"+a:a}).join("")},operate:function(b,c){var d=[];c instanceof a.Color||(c=c.toColor());for(var e=0;e<3;e++)d[e]=a.operate(b,this.rgb[e],c.rgb[e]);return new 
a.Color(d,this.alpha+c.alpha)},toHSL:function(){var a=this.rgb[0]/255,b=this.rgb[1]/255,c=this.rgb[2]/255,d=this.alpha,e=Math.max(a,b,c),f=Math.min(a,b,c),g,h,i=(e+f)/2,j=e-f;if(e===f)g=h=0;else{h=i>.5?j/(2-e-f):j/(e+f);switch(e){case a:g=(b-c)/j+(b":a.compress?">":" > "}[this.value]}}(c("less/tree")),function(a){a.Expression=function(a){this.value=a},a.Expression.prototype={eval:function(b){return this.value.length>1?new a.Expression(this.value.map(function(a){return a.eval(b)})):this.value.length===1?this.value[0].eval(b):this},toCSS:function(a){return this.value.map(function(b){return b.toCSS(a)}).join(" ")}}}(c("less/tree")),function(a){a.Import=function(b,c){var d=this;this._path=b,b instanceof a.Quoted?this.path=/\.(le?|c)ss$/.test(b.value)?b.value:b.value+".less":this.path=b.value.value||b.value,this.css=/css$/.test(this.path),this.css||c.push(this.path,function(a){if(!a)throw new Error("Error parsing "+d.path);d.root=a})},a.Import.prototype={toCSS:function(){return this.css?"@import "+this._path.toCSS()+";\n":""},eval:function(b){var c;if(this.css)return this;c=new a.Ruleset(null,this.root.rules.slice(0));for(var d=0;d0){for(var f=0;f0&&c>this.params.length)return!1;d=Math.min(c,this.arity);for(var e=0;e1?Array.prototype.push.apply(d,e.find(new a.Selector(b.elements.slice(1)),c)):d.push(e);break}});return this._lookups[g]=d},toCSS:function(b,c){var d=[],e=[],f=[],g=[],h,i;if(!this.root)if(b.length===0)g=this.selectors.map(function(a){return[a]});else for(var j=0;j0&&(h=g.map(function(a){return a.map(function(a){return a.toCSS(c)}).join("").trim()}).join(c.compress?",":g.length>3?",\n":", "),d.push(h,(c.compress?"{":" {\n ")+e.join(c.compress?"":"\n ")+(c.compress?"}":"\n}\n"))),d.push(f);return d.join("")+(c.compress?"\n":"")}}}(c("less/tree")),function(a){a.Selector=function(a){this.elements=a,this.elements[0].combinator.value===""&&(this.elements[0].combinator.value=" ")},a.Selector.prototype.match=function(a){return 
this.elements[0].value===a.elements[0].value?!0:!1},a.Selector.prototype.toCSS=function(a){if(this._css)return this._css;return this._css=this.elements.map(function(b){return typeof b=="string"?" "+b.trim():b.toCSS(a)}).join("")}}(c("less/tree")),function(b){b.URL=function(b,c){b.data?this.attrs=b:(!/^(?:https?:\/|file:\/|data:\/)?\//.test(b.value)&&c.length>0&&typeof a!="undefined"&&(b.value=c[0]+(b.value.charAt(0)==="/"?b.value.slice(1):b.value)),this.value=b,this.paths=c)},b.URL.prototype={toCSS:function(){return"url("+(this.attrs?"data:"+this.attrs.mime+this.attrs.charset+this.attrs.base64+this.attrs.data:this.value.toCSS())+")"},eval:function(a){return this.attrs?this:new b.URL(this.value.eval(a),this.paths)}}}(c("less/tree")),function(a){a.Value=function(a){this.value=a,this.is="value"},a.Value.prototype={eval:function(b){return this.value.length===1?this.value[0].eval(b):new a.Value(this.value.map(function(a){return a.eval(b)}))},toCSS:function(a){return this.value.map(function(b){return b.toCSS(a)}).join(a.compress?",":", ")}}}(c("less/tree")),function(a){a.Variable=function(a,b){this.name=a,this.index=b},a.Variable.prototype={eval:function(b){var c,d,e=this.name;e.indexOf("@@")==0&&(e="@"+(new a.Variable(e.slice(1))).eval(b).value);if(c=a.find(b.frames,function(a){if(d=a.variable(e))return d.value.eval(b)}))return c;throw{message:"variable "+e+" is undefined",index:this.index}}}}(c("less/tree" +)),c("less/tree").find=function(a,b){for(var c=0,d;c1?"["+a.value.map(function(a){return a.toCSS(!1)}).join(", ")+"]":a.toCSS(!1)};var g=location.protocol==="file:"||location.protocol==="chrome:"||location.protocol==="chrome-extension:"||location.protocol==="resource:";d.env=d.env||(location.hostname=="127.0.0.1"||location.hostname=="0.0.0.0"||location.hostname=="localhost"||location.port.length>0||g?"development":"production"),d.async=!1,d.poll=d.poll||(g?1e3:1500),d.watch=function(){return this.watchMode=!0},d.unwatch=function(){return 
this.watchMode=!1},d.env==="development"?(d.optimization=0,/!watch/.test(location.hash)&&d.watch(),d.watchTimer=setInterval(function(){d.watchMode&&n(function(a,b,c){a&&q(a.toCSS(),b,c.lastModified)})},d.poll)):d.optimization=3;var h;try{h=typeof a.localStorage=="undefined"?null:a.localStorage}catch(i){h=null}var j=document.getElementsByTagName("link"),k=/^text\/(x-)?less$/;d.sheets=[];for(var l=0;l>> 0; + for (var i = 0; i < len; i++) { + if (i in this) { + block.call(thisObject, this[i], i, this); + } + } + }; +} +if (!Array.prototype.map) { + Array.prototype.map = function(fun /*, thisp*/) { + var len = this.length >>> 0; + var res = new Array(len); + var thisp = arguments[1]; + + for (var i = 0; i < len; i++) { + if (i in this) { + res[i] = fun.call(thisp, this[i], i, this); + } + } + return res; + }; +} +if (!Array.prototype.filter) { + Array.prototype.filter = function (block /*, thisp */) { + var values = []; + var thisp = arguments[1]; + for (var i = 0; i < this.length; i++) { + if (block.call(thisp, this[i])) { + values.push(this[i]); + } + } + return values; + }; +} +if (!Array.prototype.reduce) { + Array.prototype.reduce = function(fun /*, initial*/) { + var len = this.length >>> 0; + var i = 0; + + // no value to return if no initial value and an empty array + if (len === 0 && arguments.length === 1) throw new TypeError(); + + if (arguments.length >= 2) { + var rv = arguments[1]; + } else { + do { + if (i in this) { + rv = this[i++]; + break; + } + // if array contains no values, no initial value to return + if (++i >= len) throw new TypeError(); + } while (true); + } + for (; i < len; i++) { + if (i in this) { + rv = fun.call(null, rv, this[i], i, this); + } + } + return rv; + }; +} +if (!Array.prototype.indexOf) { + Array.prototype.indexOf = function (value /*, fromIndex */ ) { + var length = this.length; + var i = arguments[1] || 0; + + if (!length) return -1; + if (i >= length) return -1; + if (i < 0) i += length; + + for (; i < length; i++) { + if 
(!Object.prototype.hasOwnProperty.call(this, i)) { continue } + if (value === this[i]) return i; + } + return -1; + }; +} + +// +// Object +// +if (!Object.keys) { + Object.keys = function (object) { + var keys = []; + for (var name in object) { + if (Object.prototype.hasOwnProperty.call(object, name)) { + keys.push(name); + } + } + return keys; + }; +} + +// +// String +// +if (!String.prototype.trim) { + String.prototype.trim = function () { + return String(this).replace(/^\s\s*/, '').replace(/\s\s*$/, ''); + }; +} +var less, tree; + +if (typeof(window) === 'undefined') { + less = exports, + tree = require('less/tree'); +} else { + if (typeof(window.less) === 'undefined') { window.less = {} } + less = window.less, + tree = window.less.tree = {}; +} +// +// less.js - parser +// +// A relatively straight-forward predictive parser. +// There is no tokenization/lexing stage, the input is parsed +// in one sweep. +// +// To make the parser fast enough to run in the browser, several +// optimization had to be made: +// +// - Matching and slicing on a huge input is often cause of slowdowns. +// The solution is to chunkify the input into smaller strings. +// The chunks are stored in the `chunks` var, +// `j` holds the current chunk index, and `current` holds +// the index of the current chunk in relation to `input`. +// This gives us an almost 4x speed-up. +// +// - In many cases, we don't need to match individual tokens; +// for example, if a value doesn't hold any variables, operations +// or dynamic references, the parser can effectively 'skip' it, +// treating it as a literal. +// An example would be '1px solid #000' - which evaluates to itself, +// we don't need to know what the individual components are. +// The drawback, of course is that you don't get the benefits of +// syntax-checking on the CSS. This gives us a 50% speed-up in the parser, +// and a smaller speed-up in the code-gen. 
+// +// +// Token matching is done with the `$` function, which either takes +// a terminal string or regexp, or a non-terminal function to call. +// It also takes care of moving all the indices forwards. +// +// +less.Parser = function Parser(env) { + var input, // LeSS input string + i, // current index in `input` + j, // current chunk + temp, // temporarily holds a chunk's state, for backtracking + memo, // temporarily holds `i`, when backtracking + furthest, // furthest index the parser has gone to + chunks, // chunkified input + current, // index of current chunk, in `input` + parser; + + var that = this; + + // This function is called after all files + // have been imported through `@import`. + var finish = function () {}; + + var imports = this.imports = { + paths: env && env.paths || [], // Search paths, when importing + queue: [], // Files which haven't been imported yet + files: {}, // Holds the imported parse trees + mime: env && env.mime, // MIME type of .less files + push: function (path, callback) { + var that = this; + this.queue.push(path); + + // + // Import a file asynchronously + // + less.Parser.importer(path, this.paths, function (root) { + that.queue.splice(that.queue.indexOf(path), 1); // Remove the path from the queue + that.files[path] = root; // Store the root + + callback(root); + + if (that.queue.length === 0) { finish() } // Call `finish` if we're done importing + }, env); + } + }; + + function save() { temp = chunks[j], memo = i, current = i } + function restore() { chunks[j] = temp, i = memo, current = i } + + function sync() { + if (i > current) { + chunks[j] = chunks[j].slice(i - current); + current = i; + } + } + // + // Parse from a token, regexp or string, and move forward if match + // + function $(tok) { + var match, args, length, c, index, endIndex, k, mem; + + // + // Non-terminal + // + if (tok instanceof Function) { + return tok.call(parser.parsers); + // + // Terminal + // + // Either match a single character in the input, 
+ // or match a regexp in the current chunk (chunk[j]). + // + } else if (typeof(tok) === 'string') { + match = input.charAt(i) === tok ? tok : null; + length = 1; + sync (); + } else { + sync (); + + if (match = tok.exec(chunks[j])) { + length = match[0].length; + } else { + return null; + } + } + + // The match is confirmed, add the match length to `i`, + // and consume any extra white-space characters (' ' || '\n') + // which come after that. The reason for this is that LeSS's + // grammar is mostly white-space insensitive. + // + if (match) { + mem = i += length; + endIndex = i + chunks[j].length - length; + + while (i < endIndex) { + c = input.charCodeAt(i); + if (! (c === 32 || c === 10 || c === 9)) { break } + i++; + } + chunks[j] = chunks[j].slice(length + (i - mem)); + current = i; + + if (chunks[j].length === 0 && j < chunks.length - 1) { j++ } + + if(typeof(match) === 'string') { + return match; + } else { + return match.length === 1 ? match[0] : match; + } + } + } + + // Same as $(), but don't change the state of the parser, + // just return the match. + function peek(tok) { + if (typeof(tok) === 'string') { + return input.charAt(i) === tok; + } else { + if (tok.test(chunks[j])) { + return true; + } else { + return false; + } + } + } + + this.env = env = env || {}; + + // The optimization level dictates the thoroughness of the parser, + // the lower the number, the less nodes it will create in the tree. + // This could matter for debugging, or if you want to access + // the individual nodes in the tree. + this.optimization = ('optimization' in this.env) ? this.env.optimization : 1; + + this.env.filename = this.env.filename || null; + + // + // The Parser + // + return parser = { + + imports: imports, + // + // Parse an input string into an abstract syntax tree, + // call `callback` when done. 
+ // + parse: function (str, callback) { + var root, start, end, zone, line, lines, buff = [], c, error = null; + + i = j = current = furthest = 0; + chunks = []; + input = str.replace(/\r\n/g, '\n'); + + // Split the input into chunks. + chunks = (function (chunks) { + var j = 0, + skip = /[^"'`\{\}\/\(\)]+/g, + comment = /\/\*(?:[^*]|\*+[^\/*])*\*+\/|\/\/.*/g, + level = 0, + match, + chunk = chunks[0], + inParam, + inString; + + for (var i = 0, c, cc; i < input.length; i++) { + skip.lastIndex = i; + if (match = skip.exec(input)) { + if (match.index === i) { + i += match[0].length; + chunk.push(match[0]); + } + } + c = input.charAt(i); + comment.lastIndex = i; + + if (!inString && !inParam && c === '/') { + cc = input.charAt(i + 1); + if (cc === '/' || cc === '*') { + if (match = comment.exec(input)) { + if (match.index === i) { + i += match[0].length; + chunk.push(match[0]); + c = input.charAt(i); + } + } + } + } + + if (c === '{' && !inString && !inParam) { level ++; + chunk.push(c); + } else if (c === '}' && !inString && !inParam) { level --; + chunk.push(c); + chunks[++j] = chunk = []; + } else if (c === '(' && !inString && !inParam) { + chunk.push(c); + inParam = true; + } else if (c === ')' && !inString && inParam) { + chunk.push(c); + inParam = false; + } else { + if (c === '"' || c === "'" || c === '`') { + if (! inString) { + inString = c; + } else { + inString = inString === c ? false : inString; + } + } + chunk.push(c); + } + } + if (level > 0) { + throw { + type: 'Syntax', + message: "Missing closing `}`", + filename: env.filename + }; + } + + return chunks.map(function (c) { return c.join('') });; + })([[]]); + + // Start with the primary rule. + // The whole syntax tree is held under a Ruleset node, + // with the `root` property set to true, so no `{}` are + // output. The callback is called when the input is parsed. 
+ root = new(tree.Ruleset)([], $(this.parsers.primary)); + root.root = true; + + root.toCSS = (function (evaluate) { + var line, lines, column; + + return function (options, variables) { + var frames = []; + + options = options || {}; + // + // Allows setting variables with a hash, so: + // + // `{ color: new(tree.Color)('#f01') }` will become: + // + // new(tree.Rule)('@color', + // new(tree.Value)([ + // new(tree.Expression)([ + // new(tree.Color)('#f01') + // ]) + // ]) + // ) + // + if (typeof(variables) === 'object' && !Array.isArray(variables)) { + variables = Object.keys(variables).map(function (k) { + var value = variables[k]; + + if (! (value instanceof tree.Value)) { + if (! (value instanceof tree.Expression)) { + value = new(tree.Expression)([value]); + } + value = new(tree.Value)([value]); + } + return new(tree.Rule)('@' + k, value, false, 0); + }); + frames = [new(tree.Ruleset)(null, variables)]; + } + + try { + var css = evaluate.call(this, { frames: frames }) + .toCSS([], { compress: options.compress || false }); + } catch (e) { + lines = input.split('\n'); + line = getLine(e.index); + + for (var n = e.index, column = -1; + n >= 0 && input.charAt(n) !== '\n'; + n--) { column++ } + + throw { + type: e.type, + message: e.message, + filename: env.filename, + index: e.index, + line: typeof(line) === 'number' ? line + 1 : null, + callLine: e.call && (getLine(e.call) + 1), + callExtract: lines[getLine(e.call)], + stack: e.stack, + column: column, + extract: [ + lines[line - 1], + lines[line], + lines[line + 1] + ] + }; + } + if (options.compress) { + return css.replace(/(\s)+/g, "$1"); + } else { + return css; + } + + function getLine(index) { + return index ? (input.slice(0, index).match(/\n/g) || "").length : null; + } + }; + })(root.eval); + + // If `i` is smaller than the `input.length - 1`, + // it means the parser wasn't able to parse the whole + // string, so we've got a parsing error. 
+ // + // We try to extract a \n delimited string, + // showing the line where the parse error occured. + // We split it up into two parts (the part which parsed, + // and the part which didn't), so we can color them differently. + if (i < input.length - 1) { + i = furthest; + lines = input.split('\n'); + line = (input.slice(0, i).match(/\n/g) || "").length + 1; + + for (var n = i, column = -1; n >= 0 && input.charAt(n) !== '\n'; n--) { column++ } + + error = { + name: "ParseError", + message: "Syntax Error on line " + line, + index: i, + filename: env.filename, + line: line, + column: column, + extract: [ + lines[line - 2], + lines[line - 1], + lines[line] + ] + }; + } + + if (this.imports.queue.length > 0) { + finish = function () { callback(error, root) }; + } else { + callback(error, root); + } + }, + + // + // Here in, the parsing rules/functions + // + // The basic structure of the syntax tree generated is as follows: + // + // Ruleset -> Rule -> Value -> Expression -> Entity + // + // Here's some LESS code: + // + // .class { + // color: #fff; + // border: 1px solid #000; + // width: @w + 4px; + // > .child {...} + // } + // + // And here's what the parse tree might look like: + // + // Ruleset (Selector '.class', [ + // Rule ("color", Value ([Expression [Color #fff]])) + // Rule ("border", Value ([Expression [Dimension 1px][Keyword "solid"][Color #000]])) + // Rule ("width", Value ([Expression [Operation "+" [Variable "@w"][Dimension 4px]]])) + // Ruleset (Selector [Element '>', '.child'], [...]) + // ]) + // + // In general, most rules will try to parse a token with the `$()` function, and if the return + // value is truly, will return a new node, of the relevant type. Sometimes, we need to check + // first, before parsing, that's when we use `peek()`. + // + parsers: { + // + // The `primary` rule is the *entry* and *exit* point of the parser. + // The rules here can appear at any level of the parse tree. 
+ // + // The recursive nature of the grammar is an interplay between the `block` + // rule, which represents `{ ... }`, the `ruleset` rule, and this `primary` rule, + // as represented by this simplified grammar: + // + // primary → (ruleset | rule)+ + // ruleset → selector+ block + // block → '{' primary '}' + // + // Only at one point is the primary rule not called from the + // block rule: at the root level. + // + primary: function () { + var node, root = []; + + while ((node = $(this.mixin.definition) || $(this.rule) || $(this.ruleset) || + $(this.mixin.call) || $(this.comment) || $(this.directive)) + || $(/^[\s\n]+/)) { + node && root.push(node); + } + return root; + }, + + // We create a Comment node for CSS comments `/* */`, + // but keep the LeSS comments `//` silent, by just skipping + // over them. + comment: function () { + var comment; + + if (input.charAt(i) !== '/') return; + + if (input.charAt(i + 1) === '/') { + return new(tree.Comment)($(/^\/\/.*/), true); + } else if (comment = $(/^\/\*(?:[^*]|\*+[^\/*])*\*+\/\n?/)) { + return new(tree.Comment)(comment); + } + }, + + // + // Entities are tokens which can be found inside an Expression + // + entities: { + // + // A string, which supports escaping " and ' + // + // "milky way" 'he\'s the one!' + // + quoted: function () { + var str, j = i, e; + + if (input.charAt(j) === '~') { j++, e = true } // Escaped strings + if (input.charAt(j) !== '"' && input.charAt(j) !== "'") return; + + e && $('~'); + + if (str = $(/^"((?:[^"\\\r\n]|\\.)*)"|'((?:[^'\\\r\n]|\\.)*)'/)) { + return new(tree.Quoted)(str[0], str[1] || str[2], e); + } + }, + + // + // A catch-all word, such as: + // + // black border-collapse + // + keyword: function () { + var k; + if (k = $(/^[A-Za-z-]+/)) { return new(tree.Keyword)(k) } + }, + + // + // A function call + // + // rgb(255, 0, 255) + // + // We also try to catch IE's `alpha()`, but let the `alpha` parser + // deal with the details. 
+ // + // The arguments are parsed with the `entities.arguments` parser. + // + call: function () { + var name, args; + + if (! (name = /^([\w-]+|%)\(/.exec(chunks[j]))) return; + + name = name[1].toLowerCase(); + + if (name === 'url') { return null } + else { i += name.length } + + if (name === 'alpha') { return $(this.alpha) } + + $('('); // Parse the '(' and consume whitespace. + + args = $(this.entities.arguments); + + if (! $(')')) return; + + if (name) { return new(tree.Call)(name, args) } + }, + arguments: function () { + var args = [], arg; + + while (arg = $(this.expression)) { + args.push(arg); + if (! $(',')) { break } + } + return args; + }, + literal: function () { + return $(this.entities.dimension) || + $(this.entities.color) || + $(this.entities.quoted); + }, + + // + // Parse url() tokens + // + // We use a specific rule for urls, because they don't really behave like + // standard function calls. The difference is that the argument doesn't have + // to be enclosed within a string, so it can't be parsed as an Expression. + // + url: function () { + var value; + + if (input.charAt(i) !== 'u' || !$(/^url\(/)) return; + value = $(this.entities.quoted) || $(this.entities.variable) || + $(this.entities.dataURI) || $(/^[-\w%@$\/.&=:;#+?~]+/) || ""; + if (! $(')')) throw new(Error)("missing closing ) for url()"); + + return new(tree.URL)((value.value || value.data || value instanceof tree.Variable) + ? value : new(tree.Anonymous)(value), imports.paths); + }, + + dataURI: function () { + var obj; + + if ($(/^data:/)) { + obj = {}; + obj.mime = $(/^[^\/]+\/[^,;)]+/) || ''; + obj.charset = $(/^;\s*charset=[^,;)]+/) || ''; + obj.base64 = $(/^;\s*base64/) || ''; + obj.data = $(/^,\s*[^)]+/); + + if (obj.data) { return obj } + } + }, + + // + // A Variable entity, such as `@fink`, in + // + // width: @fink + 2px + // + // We use a different parser for variable definitions, + // see `parsers.variable`. 
+ // + variable: function () { + var name, index = i; + + if (input.charAt(i) === '@' && (name = $(/^@@?[\w-]+/))) { + return new(tree.Variable)(name, index); + } + }, + + // + // A Hexadecimal color + // + // #4F3C2F + // + // `rgb` and `hsl` colors are parsed through the `entities.call` parser. + // + color: function () { + var rgb; + + if (input.charAt(i) === '#' && (rgb = $(/^#([a-fA-F0-9]{6}|[a-fA-F0-9]{3})/))) { + return new(tree.Color)(rgb[1]); + } + }, + + // + // A Dimension, that is, a number and a unit + // + // 0.5em 95% + // + dimension: function () { + var value, c = input.charCodeAt(i); + if ((c > 57 || c < 45) || c === 47) return; + + if (value = $(/^(-?\d*\.?\d+)(px|%|em|pc|ex|in|deg|s|ms|pt|cm|mm|rad|grad|turn)?/)) { + return new(tree.Dimension)(value[1], value[2]); + } + }, + + // + // JavaScript code to be evaluated + // + // `window.location.href` + // + javascript: function () { + var str, j = i, e; + + if (input.charAt(j) === '~') { j++, e = true } // Escaped strings + if (input.charAt(j) !== '`') { return } + + e && $('~'); + + if (str = $(/^`([^`]*)`/)) { + return new(tree.JavaScript)(str[1], i, e); + } + } + }, + + // + // The variable part of a variable definition. Used in the `rule` parser + // + // @fink: + // + variable: function () { + var name; + + if (input.charAt(i) === '@' && (name = $(/^(@[\w-]+)\s*:/))) { return name[1] } + }, + + // + // A font size/line-height shorthand + // + // small/12px + // + // We need to peek first, or we'll match on keywords and dimensions + // + shorthand: function () { + var a, b; + + if (! 
peek(/^[@\w.%-]+\/[@\w.-]+/)) return; + + if ((a = $(this.entity)) && $('/') && (b = $(this.entity))) { + return new(tree.Shorthand)(a, b); + } + }, + + // + // Mixins + // + mixin: { + // + // A Mixin call, with an optional argument list + // + // #mixins > .square(#fff); + // .rounded(4px, black); + // .button; + // + // The `while` loop is there because mixins can be + // namespaced, but we only support the child and descendant + // selector for now. + // + call: function () { + var elements = [], e, c, args, index = i, s = input.charAt(i); + + if (s !== '.' && s !== '#') { return } + + while (e = $(/^[#.](?:[\w-]|\\(?:[a-fA-F0-9]{1,6} ?|[^a-fA-F0-9]))+/)) { + elements.push(new(tree.Element)(c, e)); + c = $('>'); + } + $('(') && (args = $(this.entities.arguments)) && $(')'); + + if (elements.length > 0 && ($(';') || peek('}'))) { + return new(tree.mixin.Call)(elements, args, index); + } + }, + + // + // A Mixin definition, with a list of parameters + // + // .rounded (@radius: 2px, @color) { + // ... + // } + // + // Until we have a finer grained state-machine, we have to + // do a look-ahead, to make sure we don't have a mixin call. + // See the `rule` function for more information. + // + // We start by matching `.rounded (`, and then proceed on to + // the argument list, which has optional default values. + // We store the parameters in `params`, with a `value` key, + // if there is a value, such as in the case of `@radius`. + // + // Once we've got our params list, and a closing `)`, we parse + // the `{...}` block. + // + definition: function () { + var name, params = [], match, ruleset, param, value; + + if ((input.charAt(i) !== '.' 
&& input.charAt(i) !== '#') || + peek(/^[^{]*(;|})/)) return; + + if (match = $(/^([#.](?:[\w-]|\\(?:[a-fA-F0-9]{1,6} ?|[^a-fA-F0-9]))+)\s*\(/)) { + name = match[1]; + + while (param = $(this.entities.variable) || $(this.entities.literal) + || $(this.entities.keyword)) { + // Variable + if (param instanceof tree.Variable) { + if ($(':')) { + if (value = $(this.expression)) { + params.push({ name: param.name, value: value }); + } else { + throw new(Error)("Expected value"); + } + } else { + params.push({ name: param.name }); + } + } else { + params.push({ value: param }); + } + if (! $(',')) { break } + } + if (! $(')')) throw new(Error)("Expected )"); + + ruleset = $(this.block); + + if (ruleset) { + return new(tree.mixin.Definition)(name, params, ruleset); + } + } + } + }, + + // + // Entities are the smallest recognized token, + // and can be found inside a rule's value. + // + entity: function () { + return $(this.entities.literal) || $(this.entities.variable) || $(this.entities.url) || + $(this.entities.call) || $(this.entities.keyword) || $(this.entities.javascript) || + $(this.comment); + }, + + // + // A Rule terminator. Note that we use `peek()` to check for '}', + // because the `block` rule will be expecting it, but we still need to make sure + // it's there, if ';' was ommitted. + // + end: function () { + return $(';') || peek('}'); + }, + + // + // IE's alpha function + // + // alpha(opacity=88) + // + alpha: function () { + var value; + + if (! $(/^opacity=/i)) return; + if (value = $(/^\d+/) || $(this.entities.variable)) { + if (! $(')')) throw new(Error)("missing closing ) for alpha()"); + return new(tree.Alpha)(value); + } + }, + + // + // A Selector Element + // + // div + // + h1 + // #socks + // input[type="text"] + // + // Elements are the building blocks for Selectors, + // they are made out of a `Combinator` (see combinator rule), + // and an element name, such as a tag a class, or `*`. 
+ // + element: function () { + var e, t, c; + + c = $(this.combinator); + e = $(/^(?:[.#]?|:*)(?:[\w-]|\\(?:[a-fA-F0-9]{1,6} ?|[^a-fA-F0-9]))+/) || $('*') || $(this.attribute) || $(/^\([^)@]+\)/); + + if (e) { return new(tree.Element)(c, e) } + }, + + // + // Combinators combine elements together, in a Selector. + // + // Because our parser isn't white-space sensitive, special care + // has to be taken, when parsing the descendant combinator, ` `, + // as it's an empty space. We have to check the previous character + // in the input, to see if it's a ` ` character. More info on how + // we deal with this in *combinator.js*. + // + combinator: function () { + var match, c = input.charAt(i); + + if (c === '>' || c === '&' || c === '+' || c === '~') { + i++; + while (input.charAt(i) === ' ') { i++ } + return new(tree.Combinator)(c); + } else if (c === ':' && input.charAt(i + 1) === ':') { + i += 2; + while (input.charAt(i) === ' ') { i++ } + return new(tree.Combinator)('::'); + } else if (input.charAt(i - 1) === ' ') { + return new(tree.Combinator)(" "); + } else { + return new(tree.Combinator)(null); + } + }, + + // + // A CSS Selector + // + // .class > div + h1 + // li a:hover + // + // Selectors are made out of one or more Elements, see above. + // + selector: function () { + var sel, e, elements = [], c, match; + + while (e = $(this.element)) { + c = input.charAt(i); + elements.push(e) + if (c === '{' || c === '}' || c === ';' || c === ',') { break } + } + + if (elements.length > 0) { return new(tree.Selector)(elements) } + }, + tag: function () { + return $(/^[a-zA-Z][a-zA-Z-]*[0-9]?/) || $('*'); + }, + attribute: function () { + var attr = '', key, val, op; + + if (! $('[')) return; + + if (key = $(/^[a-zA-Z-]+/) || $(this.entities.quoted)) { + if ((op = $(/^[|~*$^]?=/)) && + (val = $(this.entities.quoted) || $(/^[\w-]+/))) { + attr = [key, op, val.toCSS ? val.toCSS() : val].join(''); + } else { attr = key } + } + + if (! 
$(']')) return; + + if (attr) { return "[" + attr + "]" } + }, + + // + // The `block` rule is used by `ruleset` and `mixin.definition`. + // It's a wrapper around the `primary` rule, with added `{}`. + // + block: function () { + var content; + + if ($('{') && (content = $(this.primary)) && $('}')) { + return content; + } + }, + + // + // div, .class, body > p {...} + // + ruleset: function () { + var selectors = [], s, rules, match; + save(); + + if (match = /^([.#: \w-]+)[\s\n]*\{/.exec(chunks[j])) { + i += match[0].length - 1; + selectors = [new(tree.Selector)([new(tree.Element)(null, match[1])])]; + } else { + while (s = $(this.selector)) { + selectors.push(s); + $(this.comment); + if (! $(',')) { break } + $(this.comment); + } + } + + if (selectors.length > 0 && (rules = $(this.block))) { + return new(tree.Ruleset)(selectors, rules); + } else { + // Backtrack + furthest = i; + restore(); + } + }, + rule: function () { + var name, value, c = input.charAt(i), important, match; + save(); + + if (c === '.' || c === '#' || c === '&') { return } + + if (name = $(this.variable) || $(this.property)) { + if ((name.charAt(0) != '@') && (match = /^([^@+\/'"*`(;{}-]*);/.exec(chunks[j]))) { + i += match[0].length - 1; + value = new(tree.Anonymous)(match[1]); + } else if (name === "font") { + value = $(this.font); + } else { + value = $(this.value); + } + important = $(this.important); + + if (value && $(this.end)) { + return new(tree.Rule)(name, value, important, memo); + } else { + furthest = i; + restore(); + } + } + }, + + // + // An @import directive + // + // @import "lib"; + // + // Depending on our environemnt, importing is done differently: + // In the browser, it's an XHR request, in Node, it would be a + // file-system operation. The function used for importing is + // stored in `import`, which we pass to the Import constructor. 
+ // + "import": function () { + var path; + if ($(/^@import\s+/) && + (path = $(this.entities.quoted) || $(this.entities.url)) && + $(';')) { + return new(tree.Import)(path, imports); + } + }, + + // + // A CSS Directive + // + // @charset "utf-8"; + // + directive: function () { + var name, value, rules, types; + + if (input.charAt(i) !== '@') return; + + if (value = $(this['import'])) { + return value; + } else if (name = $(/^@media|@page|@-[-a-z]+/)) { + types = ($(/^[^{]+/) || '').trim(); + if (rules = $(this.block)) { + return new(tree.Directive)(name + " " + types, rules); + } + } else if (name = $(/^@[-a-z]+/)) { + if (name === '@font-face') { + if (rules = $(this.block)) { + return new(tree.Directive)(name, rules); + } + } else if ((value = $(this.entity)) && $(';')) { + return new(tree.Directive)(name, value); + } + } + }, + font: function () { + var value = [], expression = [], weight, shorthand, font, e; + + while (e = $(this.shorthand) || $(this.entity)) { + expression.push(e); + } + value.push(new(tree.Expression)(expression)); + + if ($(',')) { + while (e = $(this.expression)) { + value.push(e); + if (! $(',')) { break } + } + } + return new(tree.Value)(value); + }, + + // + // A Value is a comma-delimited list of Expressions + // + // font-family: Baskerville, Georgia, serif; + // + // In a Rule, a Value represents everything after the `:`, + // and before the `;`. + // + value: function () { + var e, expressions = [], important; + + while (e = $(this.expression)) { + expressions.push(e); + if (! $(',')) { break } + } + + if (expressions.length > 0) { + return new(tree.Value)(expressions); + } + }, + important: function () { + if (input.charAt(i) === '!') { + return $(/^! 
*important/); + } + }, + sub: function () { + var e; + + if ($('(') && (e = $(this.expression)) && $(')')) { + return e; + } + }, + multiplication: function () { + var m, a, op, operation; + if (m = $(this.operand)) { + while ((op = ($('/') || $('*'))) && (a = $(this.operand))) { + operation = new(tree.Operation)(op, [operation || m, a]); + } + return operation || m; + } + }, + addition: function () { + var m, a, op, operation; + if (m = $(this.multiplication)) { + while ((op = $(/^[-+]\s+/) || (input.charAt(i - 1) != ' ' && ($('+') || $('-')))) && + (a = $(this.multiplication))) { + operation = new(tree.Operation)(op, [operation || m, a]); + } + return operation || m; + } + }, + + // + // An operand is anything that can be part of an operation, + // such as a Color, or a Variable + // + operand: function () { + var negate, p = input.charAt(i + 1); + + if (input.charAt(i) === '-' && (p === '@' || p === '(')) { negate = $('-') } + var o = $(this.sub) || $(this.entities.dimension) || + $(this.entities.color) || $(this.entities.variable) || + $(this.entities.call); + return negate ? new(tree.Operation)('*', [new(tree.Dimension)(-1), o]) + : o; + }, + + // + // Expressions either represent mathematical operations, + // or white-space delimited Entities. + // + // 1px solid black + // @var * 2 + // + expression: function () { + var e, delim, entities = [], d; + + while (e = $(this.addition) || $(this.entity)) { + entities.push(e); + } + if (entities.length > 0) { + return new(tree.Expression)(entities); + } + }, + property: function () { + var name; + + if (name = $(/^(\*?-?[-a-z_0-9]+)\s*:/)) { + return name[1]; + } + } + } + }; +}; + +if (typeof(window) !== 'undefined') { + // + // Used by `@import` directives + // + less.Parser.importer = function (path, paths, callback, env) { + if (path.charAt(0) !== '/' && paths.length > 0) { + path = paths[0] + path; + } + // We pass `true` as 3rd argument, to force the reload of the import. 
+ // This is so we can get the syntax tree as opposed to just the CSS output, + // as we need this to evaluate the current stylesheet. + loadStyleSheet({ href: path, title: path, type: env.mime }, callback, true); + }; +} + +(function (tree) { + +tree.functions = { + rgb: function (r, g, b) { + return this.rgba(r, g, b, 1.0); + }, + rgba: function (r, g, b, a) { + var rgb = [r, g, b].map(function (c) { return number(c) }), + a = number(a); + return new(tree.Color)(rgb, a); + }, + hsl: function (h, s, l) { + return this.hsla(h, s, l, 1.0); + }, + hsla: function (h, s, l, a) { + h = (number(h) % 360) / 360; + s = number(s); l = number(l); a = number(a); + + var m2 = l <= 0.5 ? l * (s + 1) : l + s - l * s; + var m1 = l * 2 - m2; + + return this.rgba(hue(h + 1/3) * 255, + hue(h) * 255, + hue(h - 1/3) * 255, + a); + + function hue(h) { + h = h < 0 ? h + 1 : (h > 1 ? h - 1 : h); + if (h * 6 < 1) return m1 + (m2 - m1) * h * 6; + else if (h * 2 < 1) return m2; + else if (h * 3 < 2) return m1 + (m2 - m1) * (2/3 - h) * 6; + else return m1; + } + }, + hue: function (color) { + return new(tree.Dimension)(Math.round(color.toHSL().h)); + }, + saturation: function (color) { + return new(tree.Dimension)(Math.round(color.toHSL().s * 100), '%'); + }, + lightness: function (color) { + return new(tree.Dimension)(Math.round(color.toHSL().l * 100), '%'); + }, + alpha: function (color) { + return new(tree.Dimension)(color.toHSL().a); + }, + saturate: function (color, amount) { + var hsl = color.toHSL(); + + hsl.s += amount.value / 100; + hsl.s = clamp(hsl.s); + return hsla(hsl); + }, + desaturate: function (color, amount) { + var hsl = color.toHSL(); + + hsl.s -= amount.value / 100; + hsl.s = clamp(hsl.s); + return hsla(hsl); + }, + lighten: function (color, amount) { + var hsl = color.toHSL(); + + hsl.l += amount.value / 100; + hsl.l = clamp(hsl.l); + return hsla(hsl); + }, + darken: function (color, amount) { + var hsl = color.toHSL(); + + hsl.l -= amount.value / 100; + hsl.l = 
clamp(hsl.l); + return hsla(hsl); + }, + fadein: function (color, amount) { + var hsl = color.toHSL(); + + hsl.a += amount.value / 100; + hsl.a = clamp(hsl.a); + return hsla(hsl); + }, + fadeout: function (color, amount) { + var hsl = color.toHSL(); + + hsl.a -= amount.value / 100; + hsl.a = clamp(hsl.a); + return hsla(hsl); + }, + spin: function (color, amount) { + var hsl = color.toHSL(); + var hue = (hsl.h + amount.value) % 360; + + hsl.h = hue < 0 ? 360 + hue : hue; + + return hsla(hsl); + }, + // + // Copyright (c) 2006-2009 Hampton Catlin, Nathan Weizenbaum, and Chris Eppstein + // http://sass-lang.com + // + mix: function (color1, color2, weight) { + var p = weight.value / 100.0; + var w = p * 2 - 1; + var a = color1.toHSL().a - color2.toHSL().a; + + var w1 = (((w * a == -1) ? w : (w + a) / (1 + w * a)) + 1) / 2.0; + var w2 = 1 - w1; + + var rgb = [color1.rgb[0] * w1 + color2.rgb[0] * w2, + color1.rgb[1] * w1 + color2.rgb[1] * w2, + color1.rgb[2] * w1 + color2.rgb[2] * w2]; + + var alpha = color1.alpha * p + color2.alpha * (1 - p); + + return new(tree.Color)(rgb, alpha); + }, + greyscale: function (color) { + return this.desaturate(color, new(tree.Dimension)(100)); + }, + e: function (str) { + return new(tree.Anonymous)(str instanceof tree.JavaScript ? str.evaluated : str); + }, + escape: function (str) { + return new(tree.Anonymous)(encodeURI(str.value).replace(/=/g, "%3D").replace(/:/g, "%3A").replace(/#/g, "%23").replace(/;/g, "%3B").replace(/\(/g, "%28").replace(/\)/g, "%29")); + }, + '%': function (quoted /* arg, arg, ...*/) { + var args = Array.prototype.slice.call(arguments, 1), + str = quoted.value; + + for (var i = 0; i < args.length; i++) { + str = str.replace(/%[sda]/i, function(token) { + var value = token.match(/s/i) ? args[i].value : args[i].toCSS(); + return token.match(/[A-Z]$/) ? 
encodeURIComponent(value) : value; + }); + } + str = str.replace(/%%/g, '%'); + return new(tree.Quoted)('"' + str + '"', str); + }, + round: function (n) { + if (n instanceof tree.Dimension) { + return new(tree.Dimension)(Math.round(number(n)), n.unit); + } else if (typeof(n) === 'number') { + return Math.round(n); + } else { + throw { + error: "RuntimeError", + message: "math functions take numbers as parameters" + }; + } + } +}; + +function hsla(hsla) { + return tree.functions.hsla(hsla.h, hsla.s, hsla.l, hsla.a); +} + +function number(n) { + if (n instanceof tree.Dimension) { + return parseFloat(n.unit == '%' ? n.value / 100 : n.value); + } else if (typeof(n) === 'number') { + return n; + } else { + throw { + error: "RuntimeError", + message: "color functions take numbers as parameters" + }; + } +} + +function clamp(val) { + return Math.min(1, Math.max(0, val)); +} + +})(require('less/tree')); +(function (tree) { + +tree.Alpha = function (val) { + this.value = val; +}; +tree.Alpha.prototype = { + toCSS: function () { + return "alpha(opacity=" + + (this.value.toCSS ? this.value.toCSS() : this.value) + ")"; + }, + eval: function () { return this } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Anonymous = function (string) { + this.value = string.value || string; +}; +tree.Anonymous.prototype = { + toCSS: function () { + return this.value; + }, + eval: function () { return this } +}; + +})(require('less/tree')); +(function (tree) { + +// +// A function call node. +// +tree.Call = function (name, args) { + this.name = name; + this.args = args; +}; +tree.Call.prototype = { + // + // When evaluating a function call, + // we either find the function in `tree.functions` [1], + // in which case we call it, passing the evaluated arguments, + // or we simply print it out as it appeared originally [2]. + // + // The *functions.js* file contains the built-in functions. 
+ // + // The reason why we evaluate the arguments, is in the case where + // we try to pass a variable to a function, like: `saturate(@color)`. + // The function should receive the value, not the variable. + // + eval: function (env) { + var args = this.args.map(function (a) { return a.eval(env) }); + + if (this.name in tree.functions) { // 1. + return tree.functions[this.name].apply(tree.functions, args); + } else { // 2. + return new(tree.Anonymous)(this.name + + "(" + args.map(function (a) { return a.toCSS() }).join(', ') + ")"); + } + }, + + toCSS: function (env) { + return this.eval(env).toCSS(); + } +}; + +})(require('less/tree')); +(function (tree) { +// +// RGB Colors - #ff0014, #eee +// +tree.Color = function (rgb, a) { + // + // The end goal here, is to parse the arguments + // into an integer triplet, such as `128, 255, 0` + // + // This facilitates operations and conversions. + // + if (Array.isArray(rgb)) { + this.rgb = rgb; + } else if (rgb.length == 6) { + this.rgb = rgb.match(/.{2}/g).map(function (c) { + return parseInt(c, 16); + }); + } else if (rgb.length == 8) { + this.alpha = parseInt(rgb.substring(0,2), 16) / 255.0; + this.rgb = rgb.substr(2).match(/.{2}/g).map(function (c) { + return parseInt(c, 16); + }); + } else { + this.rgb = rgb.split('').map(function (c) { + return parseInt(c + c, 16); + }); + } + this.alpha = typeof(a) === 'number' ? a : 1; +}; +tree.Color.prototype = { + eval: function () { return this }, + + // + // If we have some transparency, the only way to represent it + // is via `rgba`. Otherwise, we use the hex representation, + // which has better compatibility with older browsers. + // Values are capped between `0` and `255`, rounded and zero-padded. + // + toCSS: function () { + if (this.alpha < 1.0) { + return "rgba(" + this.rgb.map(function (c) { + return Math.round(c); + }).concat(this.alpha).join(', ') + ")"; + } else { + return '#' + this.rgb.map(function (i) { + i = Math.round(i); + i = (i > 255 ? 255 : (i < 0 ? 
0 : i)).toString(16); + return i.length === 1 ? '0' + i : i; + }).join(''); + } + }, + + // + // Operations have to be done per-channel, if not, + // channels will spill onto each other. Once we have + // our result, in the form of an integer triplet, + // we create a new Color node to hold the result. + // + operate: function (op, other) { + var result = []; + + if (! (other instanceof tree.Color)) { + other = other.toColor(); + } + + for (var c = 0; c < 3; c++) { + result[c] = tree.operate(op, this.rgb[c], other.rgb[c]); + } + return new(tree.Color)(result, this.alpha + other.alpha); + }, + + toHSL: function () { + var r = this.rgb[0] / 255, + g = this.rgb[1] / 255, + b = this.rgb[2] / 255, + a = this.alpha; + + var max = Math.max(r, g, b), min = Math.min(r, g, b); + var h, s, l = (max + min) / 2, d = max - min; + + if (max === min) { + h = s = 0; + } else { + s = l > 0.5 ? d / (2 - max - min) : d / (max + min); + + switch (max) { + case r: h = (g - b) / d + (g < b ? 6 : 0); break; + case g: h = (b - r) / d + 2; break; + case b: h = (r - g) / d + 4; break; + } + h /= 6; + } + return { h: h * 360, s: s, l: l, a: a }; + } +}; + + +})(require('less/tree')); +(function (tree) { + +tree.Comment = function (value, silent) { + this.value = value; + this.silent = !!silent; +}; +tree.Comment.prototype = { + toCSS: function (env) { + return env.compress ? '' : this.value; + }, + eval: function () { return this } +}; + +})(require('less/tree')); +(function (tree) { + +// +// A number with a unit +// +tree.Dimension = function (value, unit) { + this.value = parseFloat(value); + this.unit = unit || null; +}; + +tree.Dimension.prototype = { + eval: function () { return this }, + toColor: function () { + return new(tree.Color)([this.value, this.value, this.value]); + }, + toCSS: function () { + var css = this.value + this.unit; + return css; + }, + + // In an operation between two Dimensions, + // we default to the first Dimension's unit, + // so `1px + 2em` will yield `3px`. 
+ // In the future, we could implement some unit + // conversions such that `100cm + 10mm` would yield + // `101cm`. + operate: function (op, other) { + return new(tree.Dimension) + (tree.operate(op, this.value, other.value), + this.unit || other.unit); + } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Directive = function (name, value) { + this.name = name; + if (Array.isArray(value)) { + this.ruleset = new(tree.Ruleset)([], value); + } else { + this.value = value; + } +}; +tree.Directive.prototype = { + toCSS: function (ctx, env) { + if (this.ruleset) { + this.ruleset.root = true; + return this.name + (env.compress ? '{' : ' {\n ') + + this.ruleset.toCSS(ctx, env).trim().replace(/\n/g, '\n ') + + (env.compress ? '}': '\n}\n'); + } else { + return this.name + ' ' + this.value.toCSS() + ';\n'; + } + }, + eval: function (env) { + env.frames.unshift(this); + this.ruleset = this.ruleset && this.ruleset.eval(env); + env.frames.shift(); + return this; + }, + variable: function (name) { return tree.Ruleset.prototype.variable.call(this.ruleset, name) }, + find: function () { return tree.Ruleset.prototype.find.apply(this.ruleset, arguments) }, + rulesets: function () { return tree.Ruleset.prototype.rulesets.apply(this.ruleset) } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Element = function (combinator, value) { + this.combinator = combinator instanceof tree.Combinator ? + combinator : new(tree.Combinator)(combinator); + this.value = value.trim(); +}; +tree.Element.prototype.toCSS = function (env) { + return this.combinator.toCSS(env || {}) + this.value; +}; + +tree.Combinator = function (value) { + if (value === ' ') { + this.value = ' '; + } else { + this.value = value ? value.trim() : ""; + } +}; +tree.Combinator.prototype.toCSS = function (env) { + return { + '' : '', + ' ' : ' ', + '&' : '', + ':' : ' :', + '::': '::', + '+' : env.compress ? '+' : ' + ', + '~' : env.compress ? '~' : ' ~ ', + '>' : env.compress ? 
'>' : ' > ' + }[this.value]; +}; + +})(require('less/tree')); +(function (tree) { + +tree.Expression = function (value) { this.value = value }; +tree.Expression.prototype = { + eval: function (env) { + if (this.value.length > 1) { + return new(tree.Expression)(this.value.map(function (e) { + return e.eval(env); + })); + } else if (this.value.length === 1) { + return this.value[0].eval(env); + } else { + return this; + } + }, + toCSS: function (env) { + return this.value.map(function (e) { + return e.toCSS(env); + }).join(' '); + } +}; + +})(require('less/tree')); +(function (tree) { +// +// CSS @import node +// +// The general strategy here is that we don't want to wait +// for the parsing to be completed, before we start importing +// the file. That's because in the context of a browser, +// most of the time will be spent waiting for the server to respond. +// +// On creation, we push the import path to our import queue, though +// `import,push`, we also pass it a callback, which it'll call once +// the file has been fetched, and parsed. +// +tree.Import = function (path, imports) { + var that = this; + + this._path = path; + + // The '.less' extension is optional + if (path instanceof tree.Quoted) { + this.path = /\.(le?|c)ss$/.test(path.value) ? path.value : path.value + '.less'; + } else { + this.path = path.value.value || path.value; + } + + this.css = /css$/.test(this.path); + + // Only pre-compile .less files + if (! this.css) { + imports.push(this.path, function (root) { + if (! root) { + throw new(Error)("Error parsing " + that.path); + } + that.root = root; + }); + } +}; + +// +// The actual import node doesn't return anything, when converted to CSS. +// The reason is that it's used at the evaluation stage, so that the rules +// it imports can be treated like any other rules. +// +// In `eval`, we make sure all Import nodes get evaluated, recursively, so +// we end up with a flat structure, which can easily be imported in the parent +// ruleset. 
+// +tree.Import.prototype = { + toCSS: function () { + if (this.css) { + return "@import " + this._path.toCSS() + ';\n'; + } else { + return ""; + } + }, + eval: function (env) { + var ruleset; + + if (this.css) { + return this; + } else { + ruleset = new(tree.Ruleset)(null, this.root.rules.slice(0)); + + for (var i = 0; i < ruleset.rules.length; i++) { + if (ruleset.rules[i] instanceof tree.Import) { + Array.prototype + .splice + .apply(ruleset.rules, + [i, 1].concat(ruleset.rules[i].eval(env))); + } + } + return ruleset.rules; + } + } +}; + +})(require('less/tree')); +(function (tree) { + +tree.JavaScript = function (string, index, escaped) { + this.escaped = escaped; + this.expression = string; + this.index = index; +}; +tree.JavaScript.prototype = { + eval: function (env) { + var result, + that = this, + context = {}; + + var expression = this.expression.replace(/@\{([\w-]+)\}/g, function (_, name) { + return tree.jsify(new(tree.Variable)('@' + name, that.index).eval(env)); + }); + + try { + expression = new(Function)('return (' + expression + ')'); + } catch (e) { + throw { message: "JavaScript evaluation error: `" + expression + "`" , + index: this.index }; + } + + for (var k in env.frames[0].variables()) { + context[k.slice(1)] = { + value: env.frames[0].variables()[k].value, + toJS: function () { + return this.value.eval(env).toCSS(); + } + }; + } + + try { + result = expression.call(context); + } catch (e) { + throw { message: "JavaScript evaluation error: '" + e.name + ': ' + e.message + "'" , + index: this.index }; + } + if (typeof(result) === 'string') { + return new(tree.Quoted)('"' + result + '"', result, this.escaped, this.index); + } else if (Array.isArray(result)) { + return new(tree.Anonymous)(result.join(', ')); + } else { + return new(tree.Anonymous)(result); + } + } +}; + +})(require('less/tree')); + +(function (tree) { + +tree.Keyword = function (value) { this.value = value }; +tree.Keyword.prototype = { + eval: function () { return this }, + 
toCSS: function () { return this.value } +}; + +})(require('less/tree')); +(function (tree) { + +tree.mixin = {}; +tree.mixin.Call = function (elements, args, index) { + this.selector = new(tree.Selector)(elements); + this.arguments = args; + this.index = index; +}; +tree.mixin.Call.prototype = { + eval: function (env) { + var mixins, args, rules = [], match = false; + + for (var i = 0; i < env.frames.length; i++) { + if ((mixins = env.frames[i].find(this.selector)).length > 0) { + args = this.arguments && this.arguments.map(function (a) { return a.eval(env) }); + for (var m = 0; m < mixins.length; m++) { + if (mixins[m].match(args, env)) { + try { + Array.prototype.push.apply( + rules, mixins[m].eval(env, this.arguments).rules); + match = true; + } catch (e) { + throw { message: e.message, index: e.index, stack: e.stack, call: this.index }; + } + } + } + if (match) { + return rules; + } else { + throw { message: 'No matching definition was found for `' + + this.selector.toCSS().trim() + '(' + + this.arguments.map(function (a) { + return a.toCSS(); + }).join(', ') + ")`", + index: this.index }; + } + } + } + throw { message: this.selector.toCSS().trim() + " is undefined", + index: this.index }; + } +}; + +tree.mixin.Definition = function (name, params, rules) { + this.name = name; + this.selectors = [new(tree.Selector)([new(tree.Element)(null, name)])]; + this.params = params; + this.arity = params.length; + this.rules = rules; + this._lookups = {}; + this.required = params.reduce(function (count, p) { + if (!p.name || (p.name && !p.value)) { return count + 1 } + else { return count } + }, 0); + this.parent = tree.Ruleset.prototype; + this.frames = []; +}; +tree.mixin.Definition.prototype = { + toCSS: function () { return "" }, + variable: function (name) { return this.parent.variable.call(this, name) }, + variables: function () { return this.parent.variables.call(this) }, + find: function () { return this.parent.find.apply(this, arguments) }, + rulesets: function 
() { return this.parent.rulesets.apply(this) }, + + eval: function (env, args) { + var frame = new(tree.Ruleset)(null, []), context, _arguments = []; + + for (var i = 0, val; i < this.params.length; i++) { + if (this.params[i].name) { + if (val = (args && args[i]) || this.params[i].value) { + frame.rules.unshift(new(tree.Rule)(this.params[i].name, val.eval(env))); + } else { + throw { message: "wrong number of arguments for " + this.name + + ' (' + args.length + ' for ' + this.arity + ')' }; + } + } + } + for (var i = 0; i < Math.max(this.params.length, args && args.length); i++) { + _arguments.push(args[i] || this.params[i].value); + } + frame.rules.unshift(new(tree.Rule)('@arguments', new(tree.Expression)(_arguments).eval(env))); + + return new(tree.Ruleset)(null, this.rules.slice(0)).eval({ + frames: [this, frame].concat(this.frames, env.frames) + }); + }, + match: function (args, env) { + var argsLength = (args && args.length) || 0, len; + + if (argsLength < this.required) { return false } + if ((this.required > 0) && (argsLength > this.params.length)) { return false } + + len = Math.min(argsLength, this.arity); + + for (var i = 0; i < len; i++) { + if (!this.params[i].name) { + if (args[i].eval(env).toCSS() != this.params[i].value.eval(env).toCSS()) { + return false; + } + } + } + return true; + } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Operation = function (op, operands) { + this.op = op.trim(); + this.operands = operands; +}; +tree.Operation.prototype.eval = function (env) { + var a = this.operands[0].eval(env), + b = this.operands[1].eval(env), + temp; + + if (a instanceof tree.Dimension && b instanceof tree.Color) { + if (this.op === '*' || this.op === '+') { + temp = b, b = a, a = temp; + } else { + throw { name: "OperationError", + message: "Can't substract or divide a color from a number" }; + } + } + return a.operate(this.op, b); +}; + +tree.operate = function (op, a, b) { + switch (op) { + case '+': return a + b; + case '-': 
return a - b; + case '*': return a * b; + case '/': return a / b; + } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Quoted = function (str, content, escaped, i) { + this.escaped = escaped; + this.value = content || ''; + this.quote = str.charAt(0); + this.index = i; +}; +tree.Quoted.prototype = { + toCSS: function () { + if (this.escaped) { + return this.value; + } else { + return this.quote + this.value + this.quote; + } + }, + eval: function (env) { + var that = this; + var value = this.value.replace(/`([^`]+)`/g, function (_, exp) { + return new(tree.JavaScript)(exp, that.index, true).eval(env).value; + }).replace(/@\{([\w-]+)\}/g, function (_, name) { + var v = new(tree.Variable)('@' + name, that.index).eval(env); + return v.value || v.toCSS(); + }); + return new(tree.Quoted)(this.quote + value + this.quote, value, this.escaped, this.index); + } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Rule = function (name, value, important, index) { + this.name = name; + this.value = (value instanceof tree.Value) ? value : new(tree.Value)([value]); + this.important = important ? ' ' + important.trim() : ''; + this.index = index; + + if (name.charAt(0) === '@') { + this.variable = true; + } else { this.variable = false } +}; +tree.Rule.prototype.toCSS = function (env) { + if (this.variable) { return "" } + else { + return this.name + (env.compress ? 
':' : ': ') + + this.value.toCSS(env) + + this.important + ";"; + } +}; + +tree.Rule.prototype.eval = function (context) { + return new(tree.Rule)(this.name, this.value.eval(context), this.important, this.index); +}; + +tree.Shorthand = function (a, b) { + this.a = a; + this.b = b; +}; + +tree.Shorthand.prototype = { + toCSS: function (env) { + return this.a.toCSS(env) + "/" + this.b.toCSS(env); + }, + eval: function () { return this } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Ruleset = function (selectors, rules) { + this.selectors = selectors; + this.rules = rules; + this._lookups = {}; +}; +tree.Ruleset.prototype = { + eval: function (env) { + var ruleset = new(tree.Ruleset)(this.selectors, this.rules.slice(0)); + + ruleset.root = this.root; + + // push the current ruleset to the frames stack + env.frames.unshift(ruleset); + + // Evaluate imports + if (ruleset.root) { + for (var i = 0; i < ruleset.rules.length; i++) { + if (ruleset.rules[i] instanceof tree.Import) { + Array.prototype.splice + .apply(ruleset.rules, [i, 1].concat(ruleset.rules[i].eval(env))); + } + } + } + + // Store the frames around mixin definitions, + // so they can be evaluated like closures when the time comes. + for (var i = 0; i < ruleset.rules.length; i++) { + if (ruleset.rules[i] instanceof tree.mixin.Definition) { + ruleset.rules[i].frames = env.frames.slice(0); + } + } + + // Evaluate mixin calls. + for (var i = 0; i < ruleset.rules.length; i++) { + if (ruleset.rules[i] instanceof tree.mixin.Call) { + Array.prototype.splice + .apply(ruleset.rules, [i, 1].concat(ruleset.rules[i].eval(env))); + } + } + + // Evaluate everything else + for (var i = 0, rule; i < ruleset.rules.length; i++) { + rule = ruleset.rules[i]; + + if (! (rule instanceof tree.mixin.Definition)) { + ruleset.rules[i] = rule.eval ? 
rule.eval(env) : rule; + } + } + + // Pop the stack + env.frames.shift(); + + return ruleset; + }, + match: function (args) { + return !args || args.length === 0; + }, + variables: function () { + if (this._variables) { return this._variables } + else { + return this._variables = this.rules.reduce(function (hash, r) { + if (r instanceof tree.Rule && r.variable === true) { + hash[r.name] = r; + } + return hash; + }, {}); + } + }, + variable: function (name) { + return this.variables()[name]; + }, + rulesets: function () { + if (this._rulesets) { return this._rulesets } + else { + return this._rulesets = this.rules.filter(function (r) { + return (r instanceof tree.Ruleset) || (r instanceof tree.mixin.Definition); + }); + } + }, + find: function (selector, self) { + self = self || this; + var rules = [], rule, match, + key = selector.toCSS(); + + if (key in this._lookups) { return this._lookups[key] } + + this.rulesets().forEach(function (rule) { + if (rule !== self) { + for (var j = 0; j < rule.selectors.length; j++) { + if (match = selector.match(rule.selectors[j])) { + if (selector.elements.length > 1) { + Array.prototype.push.apply(rules, rule.find( + new(tree.Selector)(selector.elements.slice(1)), self)); + } else { + rules.push(rule); + } + break; + } + } + } + }); + return this._lookups[key] = rules; + }, + // + // Entry point for code generation + // + // `context` holds an array of arrays. + // + toCSS: function (context, env) { + var css = [], // The CSS output + rules = [], // node.Rule instances + rulesets = [], // node.Ruleset instances + paths = [], // Current selectors + selector, // The fully rendered selector + rule; + + if (! 
this.root) { + if (context.length === 0) { + paths = this.selectors.map(function (s) { return [s] }); + } else { + for (var s = 0; s < this.selectors.length; s++) { + for (var c = 0; c < context.length; c++) { + paths.push(context[c].concat([this.selectors[s]])); + } + } + } + } + + // Compile rules and rulesets + for (var i = 0; i < this.rules.length; i++) { + rule = this.rules[i]; + + if (rule.rules || (rule instanceof tree.Directive)) { + rulesets.push(rule.toCSS(paths, env)); + } else if (rule instanceof tree.Comment) { + if (!rule.silent) { + if (this.root) { + rulesets.push(rule.toCSS(env)); + } else { + rules.push(rule.toCSS(env)); + } + } + } else { + if (rule.toCSS && !rule.variable) { + rules.push(rule.toCSS(env)); + } else if (rule.value && !rule.variable) { + rules.push(rule.value.toString()); + } + } + } + + rulesets = rulesets.join(''); + + // If this is the root node, we don't render + // a selector, or {}. + // Otherwise, only output if this ruleset has rules. + if (this.root) { + css.push(rules.join(env.compress ? '' : '\n')); + } else { + if (rules.length > 0) { + selector = paths.map(function (p) { + return p.map(function (s) { + return s.toCSS(env); + }).join('').trim(); + }).join(env.compress ? ',' : (paths.length > 3 ? ',\n' : ', ')); + css.push(selector, + (env.compress ? '{' : ' {\n ') + + rules.join(env.compress ? '' : '\n ') + + (env.compress ? '}' : '\n}\n')); + } + } + css.push(rulesets); + + return css.join('') + (env.compress ? 
'\n' : ''); + } +}; +})(require('less/tree')); +(function (tree) { + +tree.Selector = function (elements) { + this.elements = elements; + if (this.elements[0].combinator.value === "") { + this.elements[0].combinator.value = ' '; + } +}; +tree.Selector.prototype.match = function (other) { + if (this.elements[0].value === other.elements[0].value) { + return true; + } else { + return false; + } +}; +tree.Selector.prototype.toCSS = function (env) { + if (this._css) { return this._css } + + return this._css = this.elements.map(function (e) { + if (typeof(e) === 'string') { + return ' ' + e.trim(); + } else { + return e.toCSS(env); + } + }).join(''); +}; + +})(require('less/tree')); +(function (tree) { + +tree.URL = function (val, paths) { + if (val.data) { + this.attrs = val; + } else { + // Add the base path if the URL is relative and we are in the browser + if (!/^(?:https?:\/|file:\/|data:\/)?\//.test(val.value) && paths.length > 0 && typeof(window) !== 'undefined') { + val.value = paths[0] + (val.value.charAt(0) === '/' ? val.value.slice(1) : val.value); + } + this.value = val; + this.paths = paths; + } +}; +tree.URL.prototype = { + toCSS: function () { + return "url(" + (this.attrs ? 'data:' + this.attrs.mime + this.attrs.charset + this.attrs.base64 + this.attrs.data + : this.value.toCSS()) + ")"; + }, + eval: function (ctx) { + return this.attrs ? this : new(tree.URL)(this.value.eval(ctx), this.paths); + } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Value = function (value) { + this.value = value; + this.is = 'value'; +}; +tree.Value.prototype = { + eval: function (env) { + if (this.value.length === 1) { + return this.value[0].eval(env); + } else { + return new(tree.Value)(this.value.map(function (v) { + return v.eval(env); + })); + } + }, + toCSS: function (env) { + return this.value.map(function (e) { + return e.toCSS(env); + }).join(env.compress ? 
',' : ', '); + } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Variable = function (name, index) { this.name = name, this.index = index }; +tree.Variable.prototype = { + eval: function (env) { + var variable, v, name = this.name; + + if (name.indexOf('@@') == 0) { + name = '@' + new(tree.Variable)(name.slice(1)).eval(env).value; + } + + if (variable = tree.find(env.frames, function (frame) { + if (v = frame.variable(name)) { + return v.value.eval(env); + } + })) { return variable } + else { + throw { message: "variable " + name + " is undefined", + index: this.index }; + } + } +}; + +})(require('less/tree')); +require('less/tree').find = function (obj, fun) { + for (var i = 0, r; i < obj.length; i++) { + if (r = fun.call(obj, obj[i])) { return r } + } + return null; +}; +require('less/tree').jsify = function (obj) { + if (Array.isArray(obj.value) && (obj.value.length > 1)) { + return '[' + obj.value.map(function (v) { return v.toCSS(false) }).join(', ') + ']'; + } else { + return obj.toCSS(false); + } +}; +// +// browser.js - client-side engine +// + +var isFileProtocol = (location.protocol === 'file:' || + location.protocol === 'chrome:' || + location.protocol === 'chrome-extension:' || + location.protocol === 'resource:'); + +less.env = less.env || (location.hostname == '127.0.0.1' || + location.hostname == '0.0.0.0' || + location.hostname == 'localhost' || + location.port.length > 0 || + isFileProtocol ? 'development' + : 'production'); + +// Load styles asynchronously (default: false) +// +// This is set to `false` by default, so that the body +// doesn't start loading before the stylesheets are parsed. +// Setting this to `true` can result in flickering. +// +less.async = false; + +// Interval between watch polls +less.poll = less.poll || (isFileProtocol ? 
1000 : 1500); + +// +// Watch mode +// +less.watch = function () { return this.watchMode = true }; +less.unwatch = function () { return this.watchMode = false }; + +if (less.env === 'development') { + less.optimization = 0; + + if (/!watch/.test(location.hash)) { + less.watch(); + } + less.watchTimer = setInterval(function () { + if (less.watchMode) { + loadStyleSheets(function (root, sheet, env) { + if (root) { + createCSS(root.toCSS(), sheet, env.lastModified); + } + }); + } + }, less.poll); +} else { + less.optimization = 3; +} + +var cache; + +try { + cache = (typeof(window.localStorage) === 'undefined') ? null : window.localStorage; +} catch (_) { + cache = null; +} + +// +// Get all tags with the 'rel' attribute set to "stylesheet/less" +// +var links = document.getElementsByTagName('link'); +var typePattern = /^text\/(x-)?less$/; + +less.sheets = []; + +for (var i = 0; i < links.length; i++) { + if (links[i].rel === 'stylesheet/less' || (links[i].rel.match(/stylesheet/) && + (links[i].type.match(typePattern)))) { + less.sheets.push(links[i]); + } +} + + +less.refresh = function (reload) { + var startTime, endTime; + startTime = endTime = new(Date); + + loadStyleSheets(function (root, sheet, env) { + if (env.local) { + log("loading " + sheet.href + " from cache."); + } else { + log("parsed " + sheet.href + " successfully."); + createCSS(root.toCSS(), sheet, env.lastModified); + } + log("css for " + sheet.href + " generated in " + (new(Date) - endTime) + 'ms'); + (env.remaining === 0) && log("css generated in " + (new(Date) - startTime) + 'ms'); + endTime = new(Date); + }, reload); + + loadStyles(); +}; +less.refreshStyles = loadStyles; + +less.refresh(less.env === 'development'); + +function loadStyles() { + var styles = document.getElementsByTagName('style'); + for (var i = 0; i < styles.length; i++) { + if (styles[i].type.match(typePattern)) { + new(less.Parser)().parse(styles[i].innerHTML || '', function (e, tree) { + styles[i].type = 'text/css'; + 
styles[i].innerHTML = tree.toCSS(); + }); + } + } +} + +function loadStyleSheets(callback, reload) { + for (var i = 0; i < less.sheets.length; i++) { + loadStyleSheet(less.sheets[i], callback, reload, less.sheets.length - (i + 1)); + } +} + +function loadStyleSheet(sheet, callback, reload, remaining) { + var url = window.location.href.replace(/[#?].*$/, ''); + var href = sheet.href.replace(/\?.*$/, ''); + var css = cache && cache.getItem(href); + var timestamp = cache && cache.getItem(href + ':timestamp'); + var styles = { css: css, timestamp: timestamp }; + + // Stylesheets in IE don't always return the full path + if (! /^(https?|file):/.test(href)) { + if (href.charAt(0) == "/") { + href = window.location.protocol + "//" + window.location.host + href; + } else { + href = url.slice(0, url.lastIndexOf('/') + 1) + href; + } + } + + xhr(sheet.href, sheet.type, function (data, lastModified) { + if (!reload && styles && lastModified && + (new(Date)(lastModified).valueOf() === + new(Date)(styles.timestamp).valueOf())) { + // Use local copy + createCSS(styles.css, sheet); + callback(null, sheet, { local: true, remaining: remaining }); + } else { + // Use remote copy (re-parse) + try { + new(less.Parser)({ + optimization: less.optimization, + paths: [href.replace(/[\w\.-]+$/, '')], + mime: sheet.type + }).parse(data, function (e, root) { + if (e) { return error(e, href) } + try { + callback(root, sheet, { local: false, lastModified: lastModified, remaining: remaining }); + removeNode(document.getElementById('less-error-message:' + extractId(href))); + } catch (e) { + error(e, href); + } + }); + } catch (e) { + error(e, href); + } + } + }, function (status, url) { + throw new(Error)("Couldn't load " + url + " (" + status + ")"); + }); +} + +function extractId(href) { + return href.replace(/^[a-z]+:\/\/?[^\/]+/, '' ) // Remove protocol & domain + .replace(/^\//, '' ) // Remove root / + .replace(/\?.*$/, '' ) // Remove query + .replace(/\.[^\.\/]+$/, '' ) // Remove file 
extension + .replace(/[^\.\w-]+/g, '-') // Replace illegal characters + .replace(/\./g, ':'); // Replace dots with colons(for valid id) +} + +function createCSS(styles, sheet, lastModified) { + var css; + + // Strip the query-string + var href = sheet.href ? sheet.href.replace(/\?.*$/, '') : ''; + + // If there is no title set, use the filename, minus the extension + var id = 'less:' + (sheet.title || extractId(href)); + + // If the stylesheet doesn't exist, create a new node + if ((css = document.getElementById(id)) === null) { + css = document.createElement('style'); + css.type = 'text/css'; + css.media = sheet.media || 'screen'; + css.id = id; + document.getElementsByTagName('head')[0].appendChild(css); + } + + if (css.styleSheet) { // IE + try { + css.styleSheet.cssText = styles; + } catch (e) { + throw new(Error)("Couldn't reassign styleSheet.cssText."); + } + } else { + (function (node) { + if (css.childNodes.length > 0) { + if (css.firstChild.nodeValue !== node.nodeValue) { + css.replaceChild(node, css.firstChild); + } + } else { + css.appendChild(node); + } + })(document.createTextNode(styles)); + } + + // Don't update the local store if the file wasn't modified + if (lastModified && cache) { + log('saving ' + href + ' to cache.'); + cache.setItem(href, styles); + cache.setItem(href + ':timestamp', lastModified); + } +} + +function xhr(url, type, callback, errback) { + var xhr = getXMLHttpRequest(); + var async = isFileProtocol ? 
false : less.async; + + if (typeof(xhr.overrideMimeType) === 'function') { + xhr.overrideMimeType('text/css'); + } + xhr.open('GET', url, async); + xhr.setRequestHeader('Accept', type || 'text/x-less, text/css; q=0.9, */*; q=0.5'); + xhr.send(null); + + if (isFileProtocol) { + if (xhr.status === 0) { + callback(xhr.responseText); + } else { + errback(xhr.status, url); + } + } else if (async) { + xhr.onreadystatechange = function () { + if (xhr.readyState == 4) { + handleResponse(xhr, callback, errback); + } + }; + } else { + handleResponse(xhr, callback, errback); + } + + function handleResponse(xhr, callback, errback) { + if (xhr.status >= 200 && xhr.status < 300) { + callback(xhr.responseText, + xhr.getResponseHeader("Last-Modified")); + } else if (typeof(errback) === 'function') { + errback(xhr.status, url); + } + } +} + +function getXMLHttpRequest() { + if (window.XMLHttpRequest) { + return new(XMLHttpRequest); + } else { + try { + return new(ActiveXObject)("MSXML2.XMLHTTP.3.0"); + } catch (e) { + log("browser doesn't support AJAX."); + return null; + } + } +} + +function removeNode(node) { + return node && node.parentNode.removeChild(node); +} + +function log(str) { + if (less.env == 'development' && typeof(console) !== "undefined") { console.log('less: ' + str) } +} + +function error(e, href) { + var id = 'less-error-message:' + extractId(href); + + var template = ['
      ', + '
    • {0}
    • ', + '
    • {current}
    • ', + '
    • {2}
    • ', + '
    '].join('\n'); + + var elem = document.createElement('div'), timer, content; + + elem.id = id; + elem.className = "less-error-message"; + + content = '

    ' + (e.message || 'There is an error in your .less file') + + '

    ' + '

    ' + href + " "; + + if (e.extract) { + content += 'on line ' + e.line + ', column ' + (e.column + 1) + ':

    ' + + template.replace(/\[(-?\d)\]/g, function (_, i) { + return (parseInt(e.line) + parseInt(i)) || ''; + }).replace(/\{(\d)\}/g, function (_, i) { + return e.extract[parseInt(i)] || ''; + }).replace(/\{current\}/, e.extract[1].slice(0, e.column) + '' + + e.extract[1].slice(e.column) + ''); + } + elem.innerHTML = content; + + // CSS for error messages + createCSS([ + '.less-error-message ul, .less-error-message li {', + 'list-style-type: none;', + 'margin-right: 15px;', + 'padding: 4px 0;', + 'margin: 0;', + '}', + '.less-error-message label {', + 'font-size: 12px;', + 'margin-right: 15px;', + 'padding: 4px 0;', + 'color: #cc7777;', + '}', + '.less-error-message pre {', + 'color: #ee4444;', + 'padding: 4px 0;', + 'margin: 0;', + 'display: inline-block;', + '}', + '.less-error-message pre.ctx {', + 'color: #dd4444;', + '}', + '.less-error-message h3 {', + 'font-size: 20px;', + 'font-weight: bold;', + 'padding: 15px 0 5px 0;', + 'margin: 0;', + '}', + '.less-error-message a {', + 'color: #10a', + '}', + '.less-error-message .error {', + 'color: red;', + 'font-weight: bold;', + 'padding-bottom: 2px;', + 'border-bottom: 1px dashed red;', + '}' + ].join('\n'), { title: 'error-message' }); + + elem.style.cssText = [ + "font-family: Arial, sans-serif", + "border: 1px solid #e00", + "background-color: #eee", + "border-radius: 5px", + "-webkit-border-radius: 5px", + "-moz-border-radius: 5px", + "color: #e00", + "padding: 15px", + "margin-bottom: 15px" + ].join(';'); + + if (less.env == 'development') { + timer = setInterval(function () { + if (document.body) { + if (document.getElementById(id)) { + document.body.replaceChild(elem, document.getElementById(id)); + } else { + document.body.insertBefore(elem, document.body.firstChild); + } + clearInterval(timer); + } + }, 10); + } +} + +})(window); diff --git a/src/dashboard/src/media/vendor/less.js/dist/less-1.1.2.min.js b/src/dashboard/src/media/vendor/less.js/dist/less-1.1.2.min.js new file mode 100644 index 
0000000000..9b2fc8a43b --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/dist/less-1.1.2.min.js @@ -0,0 +1,16 @@ +// +// LESS - Leaner CSS v1.1.2 +// http://lesscss.org +// +// Copyright (c) 2009-2011, Alexis Sellier +// Licensed under the Apache 2.0 License. +// +// +// LESS - Leaner CSS v1.1.2 +// http://lesscss.org +// +// Copyright (c) 2009-2011, Alexis Sellier +// Licensed under the Apache 2.0 License. +// +(function(a,b){function v(a,b){var c="less-error-message:"+p(b),e=["
      ",'
    • {0}
    • ',"
    • {current}
    • ",'
    • {2}
    • ',"
    "].join("\n"),f=document.createElement("div"),g,h;f.id=c,f.className="less-error-message",h="

    "+(a.message||"There is an error in your .less file")+"

    "+'

    '+b+" ",a.extract&&(h+="on line "+a.line+", column "+(a.column+1)+":

    "+e.replace(/\[(-?\d)\]/g,function(b,c){return parseInt(a.line)+parseInt(c)||""}).replace(/\{(\d)\}/g,function(b,c){return a.extract[parseInt(c)]||""}).replace(/\{current\}/,a.extract[1].slice(0,a.column)+''+a.extract[1].slice(a.column)+"")),f.innerHTML=h,q([".less-error-message ul, .less-error-message li {","list-style-type: none;","margin-right: 15px;","padding: 4px 0;","margin: 0;","}",".less-error-message label {","font-size: 12px;","margin-right: 15px;","padding: 4px 0;","color: #cc7777;","}",".less-error-message pre {","color: #ee4444;","padding: 4px 0;","margin: 0;","display: inline-block;","}",".less-error-message pre.ctx {","color: #dd4444;","}",".less-error-message h3 {","font-size: 20px;","font-weight: bold;","padding: 15px 0 5px 0;","margin: 0;","}",".less-error-message a {","color: #10a","}",".less-error-message .error {","color: red;","font-weight: bold;","padding-bottom: 2px;","border-bottom: 1px dashed red;","}"].join("\n"),{title:"error-message"}),f.style.cssText=["font-family: Arial, sans-serif","border: 1px solid #e00","background-color: #eee","border-radius: 5px","-webkit-border-radius: 5px","-moz-border-radius: 5px","color: #e00","padding: 15px","margin-bottom: 15px"].join(";"),d.env=="development"&&(g=setInterval(function(){document.body&&(document.getElementById(c)?document.body.replaceChild(f,document.getElementById(c)):document.body.insertBefore(f,document.body.firstChild),clearInterval(g))},10))}function u(a){d.env=="development"&&typeof console!="undefined"&&console.log("less: "+a)}function t(a){return a&&a.parentNode.removeChild(a)}function s(){if(a.XMLHttpRequest)return new XMLHttpRequest;try{return new ActiveXObject("MSXML2.XMLHTTP.3.0")}catch(b){u("browser doesn't support AJAX.");return null}}function r(a,b,c,e){function i(b,c,d){b.status>=200&&b.status<300?c(b.responseText,b.getResponseHeader("Last-Modified")):typeof d=="function"&&d(b.status,a)}var f=s(),h=g?!1:d.async;typeof 
f.overrideMimeType=="function"&&f.overrideMimeType("text/css"),f.open("GET",a,h),f.setRequestHeader("Accept",b||"text/x-less, text/css; q=0.9, */*; q=0.5"),f.send(null),g?f.status===0?c(f.responseText):e(f.status,a):h?f.onreadystatechange=function(){f.readyState==4&&i(f,c,e)}:i(f,c,e)}function q(a,b,c){var d,e=b.href?b.href.replace(/\?.*$/,""):"",f="less:"+(b.title||p(e));(d=document.getElementById(f))===null&&(d=document.createElement("style"),d.type="text/css",d.media=b.media||"screen",d.id=f,document.getElementsByTagName("head")[0].appendChild(d));if(d.styleSheet)try{d.styleSheet.cssText=a}catch(g){throw new Error("Couldn't reassign styleSheet.cssText.")}else(function(a){d.childNodes.length>0?d.firstChild.nodeValue!==a.nodeValue&&d.replaceChild(a,d.firstChild):d.appendChild(a)})(document.createTextNode(a));c&&h&&(u("saving "+e+" to cache."),h.setItem(e,a),h.setItem(e+":timestamp",c))}function p(a){return a.replace(/^[a-z]+:\/\/?[^\/]+/,"").replace(/^\//,"").replace(/\?.*$/,"").replace(/\.[^\.\/]+$/,"").replace(/[^\.\w-]+/g,"-").replace(/\./g,":")}function o(b,c,e,f){var g=a.location.href.replace(/[#?].*$/,""),i=b.href.replace(/\?.*$/,""),j=h&&h.getItem(i),k=h&&h.getItem(i+":timestamp"),l={css:j,timestamp:k};/^(https?|file):/.test(i)||(i.charAt(0)=="/"?i=a.location.protocol+"//"+a.location.host+i:i=g.slice(0,g.lastIndexOf("/")+1)+i),r(b.href,b.type,function(a,g){if(!e&&l&&g&&(new Date(g)).valueOf()===(new Date(l.timestamp)).valueOf())q(l.css,b),c(null,b,{local:!0,remaining:f});else try{(new d.Parser({optimization:d.optimization,paths:[i.replace(/[\w\.-]+$/,"")],mime:b.type})).parse(a,function(a,d){if(a)return v(a,i);try{c(d,b,{local:!1,lastModified:g,remaining:f}),t(document.getElementById("less-error-message:"+p(i)))}catch(a){v(a,i)}})}catch(h){v(h,i)}},function(a,b){throw new Error("Couldn't load "+b+" ("+a+")")})}function n(a,b){for(var c=0;c>>0;for(var d=0;d>>0,c=Array(b),d=arguments[1];for(var e=0;e>>0,c=0;if(b===0&&arguments.length===1)throw new 
TypeError;if(arguments.length>=2)var d=arguments[1];else for(;;){if(c in this){d=this[c++];break}if(++c>=b)throw new TypeError}for(;c=b)return-1;c<0&&(c+=b);for(;ck&&(j[f]=j[f].slice(c-k),k=c)}function q(){j[f]=g,c=h,k=c}function p(){g=j[f],h=c,k=c}var b,c,f,g,h,i,j,k,l,m=this,n=function(){},o=this.imports={paths:a&&a.paths||[],queue:[],files:{},mime:a&&a.mime,push:function(b,c){var e=this;this.queue.push(b),d.Parser.importer(b,this.paths,function(a){e.queue.splice(e.queue.indexOf(b),1),e.files[b]=a,c(a),e.queue.length===0&&n()},a)}};this.env=a=a||{},this.optimization="optimization"in this.env?this.env.optimization:1,this.env.filename=this.env.filename||null;return l={imports:o,parse:function(d,g){var h,l,m,o,p,q,r=[],t,u=null;c=f=k=i=0,j=[],b=d.replace(/\r\n/g,"\n"),j=function(c){var d=0,e=/[^"'`\{\}\/\(\)]+/g,f=/\/\*(?:[^*]|\*+[^\/*])*\*+\/|\/\/.*/g,g=0,h,i=c[0],j,k;for(var l=0,m,n;l0)throw{type:"Syntax",message:"Missing closing `}`",filename:a.filename};return c.map(function(a){return a.join("")})}([[]]),h=new e.Ruleset([],s(this.parsers.primary)),h.root=!0,h.toCSS=function(c){var d,f,g;return function(g,h){function n(a){return a?(b.slice(0,a).match(/\n/g)||"").length:null}var i=[];g=g||{},typeof h=="object"&&!Array.isArray(h)&&(h=Object.keys(h).map(function(a){var b=h[a];b instanceof e.Value||(b instanceof e.Expression||(b=new e.Expression([b])),b=new e.Value([b]));return new e.Rule("@"+a,b,!1,0)}),i=[new e.Ruleset(null,h)]);try{var j=c.call(this,{frames:i}).toCSS([],{compress:g.compress||!1})}catch(k){f=b.split("\n"),d=n(k.index);for(var l=k.index,m=-1;l>=0&&b.charAt(l)!=="\n";l--)m++;throw{type:k.type,message:k.message,filename:a.filename,index:k.index,line:typeof d=="number"?d+1:null,callLine:k.call&&n(k.call)+1,callExtract:f[n(k.call)],stack:k.stack,column:m,extract:[f[d-1],f[d],f[d+1]]}}return g.compress?j.replace(/(\s)+/g,"$1"):j}}(h.eval);if(c=0&&b.charAt(v)!=="\n";v--)w++;u={name:"ParseError",message:"Syntax Error on line 
"+p,index:c,filename:a.filename,line:p,column:w,extract:[q[p-2],q[p-1],q[p]]}}this.imports.queue.length>0?n=function(){g(u,h)}:g(u,h)},parsers:{primary:function(){var a,b=[];while((a=s(this.mixin.definition)||s(this.rule)||s(this.ruleset)||s(this.mixin.call)||s(this.comment)||s(this.directive))||s(/^[\s\n]+/))a&&b.push(a);return b},comment:function(){var a;if(b.charAt(c)==="/"){if(b.charAt(c+1)==="/")return new e.Comment(s(/^\/\/.*/),!0);if(a=s(/^\/\*(?:[^*]|\*+[^\/*])*\*+\/\n?/))return new e.Comment(a)}},entities:{quoted:function(){var a,d=c,f;b.charAt(d)==="~"&&(d++,f=!0);if(b.charAt(d)==='"'||b.charAt(d)==="'"){f&&s("~");if(a=s(/^"((?:[^"\\\r\n]|\\.)*)"|'((?:[^'\\\r\n]|\\.)*)'/))return new e.Quoted(a[0],a[1]||a[2],f)}},keyword:function(){var a;if(a=s(/^[A-Za-z-]+/))return new e.Keyword(a)},call:function(){var a,b;if(!!(a=/^([\w-]+|%)\(/.exec(j[f]))){a=a[1].toLowerCase();if(a==="url")return null;c+=a.length;if(a==="alpha")return s(this.alpha);s("("),b=s(this.entities.arguments);if(!s(")"))return;if(a)return new e.Call(a,b)}},arguments:function(){var a=[],b;while(b=s(this.expression)){a.push(b);if(!s(","))break}return a},literal:function(){return s(this.entities.dimension)||s(this.entities.color)||s(this.entities.quoted)},url:function(){var a;if(b.charAt(c)==="u"&&!!s(/^url\(/)){a=s(this.entities.quoted)||s(this.entities.variable)||s(this.entities.dataURI)||s(/^[-\w%@$\/.&=:;#+?~]+/)||"";if(!s(")"))throw new Error("missing closing ) for url()");return new e.URL(a.value||a.data||a instanceof e.Variable?a:new e.Anonymous(a),o.paths)}},dataURI:function(){var a;if(s(/^data:/)){a={},a.mime=s(/^[^\/]+\/[^,;)]+/)||"",a.charset=s(/^;\s*charset=[^,;)]+/)||"",a.base64=s(/^;\s*base64/)||"",a.data=s(/^,\s*[^)]+/);if(a.data)return a}},variable:function(){var a,d=c;if(b.charAt(c)==="@"&&(a=s(/^@@?[\w-]+/)))return new e.Variable(a,d)},color:function(){var a;if(b.charAt(c)==="#"&&(a=s(/^#([a-fA-F0-9]{6}|[a-fA-F0-9]{3})/)))return new e.Color(a[1])},dimension:function(){var 
a,d=b.charCodeAt(c);if(!(d>57||d<45||d===47))if(a=s(/^(-?\d*\.?\d+)(px|%|em|pc|ex|in|deg|s|ms|pt|cm|mm|rad|grad|turn)?/))return new e.Dimension(a[1],a[2])},javascript:function(){var a,d=c,f;b.charAt(d)==="~"&&(d++,f=!0);if(b.charAt(d)==="`"){f&&s("~");if(a=s(/^`([^`]*)`/))return new e.JavaScript(a[1],c,f)}}},variable:function(){var a;if(b.charAt(c)==="@"&&(a=s(/^(@[\w-]+)\s*:/)))return a[1]},shorthand:function(){var a,b;if(!!t(/^[@\w.%-]+\/[@\w.-]+/)&&(a=s(this.entity))&&s("/")&&(b=s(this.entity)))return new e.Shorthand(a,b)},mixin:{call:function(){var a=[],d,f,g,h=c,i=b.charAt(c);if(i==="."||i==="#"){while(d=s(/^[#.](?:[\w-]|\\(?:[a-fA-F0-9]{1,6} ?|[^a-fA-F0-9]))+/))a.push(new e.Element(f,d)),f=s(">");s("(")&&(g=s(this.entities.arguments))&&s(")");if(a.length>0&&(s(";")||t("}")))return new e.mixin.Call(a,g,h)}},definition:function(){var a,d=[],f,g,h,i;if(!(b.charAt(c)!=="."&&b.charAt(c)!=="#"||t(/^[^{]*(;|})/)))if(f=s(/^([#.](?:[\w-]|\\(?:[a-fA-F0-9]{1,6} ?|[^a-fA-F0-9]))+)\s*\(/)){a=f[1];while(h=s(this.entities.variable)||s(this.entities.literal)||s(this.entities.keyword)){if(h instanceof e.Variable)if(s(":"))if(i=s(this.expression))d.push({name:h.name,value:i});else throw new Error("Expected value");else d.push({name:h.name});else d.push({value:h});if(!s(","))break}if(!s(")"))throw new Error("Expected )");g=s(this.block);if(g)return new e.mixin.Definition(a,d,g)}}},entity:function(){return s(this.entities.literal)||s(this.entities.variable)||s(this.entities.url)||s(this.entities.call)||s(this.entities.keyword)||s(this.entities.javascript)||s(this.comment)},end:function(){return s(";")||t("}")},alpha:function(){var a;if(!!s(/^opacity=/i))if(a=s(/^\d+/)||s(this.entities.variable)){if(!s(")"))throw new Error("missing closing ) for alpha()");return new e.Alpha(a)}},element:function(){var a,b,c;c=s(this.combinator),a=s(/^(?:[.#]?|:*)(?:[\w-]|\\(?:[a-fA-F0-9]{1,6} ?|[^a-fA-F0-9]))+/)||s("*")||s(this.attribute)||s(/^\([^)@]+\)/);if(a)return new 
e.Element(c,a)},combinator:function(){var a,d=b.charAt(c);if(d===">"||d==="&"||d==="+"||d==="~"){c++;while(b.charAt(c)===" ")c++;return new e.Combinator(d)}if(d===":"&&b.charAt(c+1)===":"){c+=2;while(b.charAt(c)===" ")c++;return new e.Combinator("::")}return b.charAt(c-1)===" "?new e.Combinator(" "):new e.Combinator(null)},selector:function(){var a,d,f=[],g,h;while(d=s(this.element)){g=b.charAt(c),f.push(d);if(g==="{"||g==="}"||g===";"||g===",")break}if(f.length>0)return new e.Selector(f)},tag:function(){return s(/^[a-zA-Z][a-zA-Z-]*[0-9]?/)||s("*")},attribute:function(){var a="",b,c,d;if(!!s("[")){if(b=s(/^[a-zA-Z-]+/)||s(this.entities.quoted))(d=s(/^[|~*$^]?=/))&&(c=s(this.entities.quoted)||s(/^[\w-]+/))?a=[b,d,c.toCSS?c.toCSS():c].join(""):a=b;if(!s("]"))return;if(a)return"["+a+"]"}},block:function(){var a;if(s("{")&&(a=s(this.primary))&&s("}"))return a},ruleset:function(){var a=[],b,d,g;p();if(g=/^([.#: \w-]+)[\s\n]*\{/.exec(j[f]))c+=g[0].length-1,a=[new e.Selector([new e.Element(null,g[1])])];else while(b=s(this.selector)){a.push(b),s(this.comment);if(!s(","))break;s(this.comment)}if(a.length>0&&(d=s(this.block)))return new e.Ruleset(a,d);i=c,q()},rule:function(){var a,d,g=b.charAt(c),k,l;p();if(g!=="."&&g!=="#"&&g!=="&")if(a=s(this.variable)||s(this.property)){a.charAt(0)!="@"&&(l=/^([^@+\/'"*`(;{}-]*);/.exec(j[f]))?(c+=l[0].length-1,d=new e.Anonymous(l[1])):a==="font"?d=s(this.font):d=s(this.value),k=s(this.important);if(d&&s(this.end))return new e.Rule(a,d,k,h);i=c,q()}},"import":function(){var a;if(s(/^@import\s+/)&&(a=s(this.entities.quoted)||s(this.entities.url))&&s(";"))return new e.Import(a,o)},directive:function(){var a,d,f,g;if(b.charAt(c)==="@"){if(d=s(this["import"]))return d;if(a=s(/^@media|@page|@-[-a-z]+/)){g=(s(/^[^{]+/)||"").trim();if(f=s(this.block))return new e.Directive(a+" "+g,f)}else if(a=s(/^@[-a-z]+/))if(a==="@font-face"){if(f=s(this.block))return new e.Directive(a,f)}else if((d=s(this.entity))&&s(";"))return new 
e.Directive(a,d)}},font:function(){var a=[],b=[],c,d,f,g;while(g=s(this.shorthand)||s(this.entity))b.push(g);a.push(new e.Expression(b));if(s(","))while(g=s(this.expression)){a.push(g);if(!s(","))break}return new e.Value(a)},value:function(){var a,b=[],c;while(a=s(this.expression)){b.push(a);if(!s(","))break}if(b.length>0)return new e.Value(b)},important:function(){if(b.charAt(c)==="!")return s(/^! *important/)},sub:function(){var a;if(s("(")&&(a=s(this.expression))&&s(")"))return a},multiplication:function(){var a,b,c,d;if(a=s(this.operand)){while((c=s("/")||s("*"))&&(b=s(this.operand)))d=new e.Operation(c,[d||a,b]);return d||a}},addition:function(){var a,d,f,g;if(a=s(this.multiplication)){while((f=s(/^[-+]\s+/)||b.charAt(c-1)!=" "&&(s("+")||s("-")))&&(d=s(this.multiplication)))g=new e.Operation(f,[g||a,d]);return g||a}},operand:function(){var a,d=b.charAt(c+1);b.charAt(c)==="-"&&(d==="@"||d==="(")&&(a=s("-"));var f=s(this.sub)||s(this.entities.dimension)||s(this.entities.color)||s(this.entities.variable)||s(this.entities.call);return a?new e.Operation("*",[new e.Dimension(-1),f]):f},expression:function(){var a,b,c=[],d;while(a=s(this.addition)||s(this.entity))c.push(a);if(c.length>0)return new e.Expression(c)},property:function(){var a;if(a=s(/^(\*?-?[-a-z_0-9]+)\s*:/))return a[1]}}}},typeof a!="undefined"&&(d.Parser.importer=function(a,b,c,d){a.charAt(0)!=="/"&&b.length>0&&(a=b[0]+a),o({href:a,title:a,type:d.mime},c,!0)}),function(a){function d(a){return Math.min(1,Math.max(0,a))}function c(b){if(b instanceof a.Dimension)return parseFloat(b.unit=="%"?b.value/100:b.value);if(typeof b=="number")return b;throw{error:"RuntimeError",message:"color functions take numbers as parameters"}}function b(b){return a.functions.hsla(b.h,b.s,b.l,b.a)}a.functions={rgb:function(a,b,c){return this.rgba(a,b,c,1)},rgba:function(b,d,e,f){var g=[b,d,e].map(function(a){return c(a)}),f=c(f);return new a.Color(g,f)},hsl:function(a,b,c){return 
this.hsla(a,b,c,1)},hsla:function(a,b,d,e){function h(a){a=a<0?a+1:a>1?a-1:a;return a*6<1?g+(f-g)*a*6:a*2<1?f:a*3<2?g+(f-g)*(2/3-a)*6:g}a=c(a)%360/360,b=c(b),d=c(d),e=c(e);var f=d<=.5?d*(b+1):d+b-d*b,g=d*2-f;return this.rgba(h(a+1/3)*255,h(a)*255,h(a-1/3)*255,e)},hue:function(b){return new a.Dimension(Math.round(b.toHSL().h))},saturation:function(b){return new a.Dimension(Math.round(b.toHSL().s*100),"%")},lightness:function(b){return new a.Dimension(Math.round(b.toHSL().l*100),"%")},alpha:function(b){return new a.Dimension(b.toHSL().a)},saturate:function(a,c){var e=a.toHSL();e.s+=c.value/100,e.s=d(e.s);return b(e)},desaturate:function(a,c){var e=a.toHSL();e.s-=c.value/100,e.s=d(e.s);return b(e)},lighten:function(a,c){var e=a.toHSL();e.l+=c.value/100,e.l=d(e.l);return b(e)},darken:function(a,c){var e=a.toHSL();e.l-=c.value/100,e.l=d(e.l);return b(e)},fadein:function(a,c){var e=a.toHSL();e.a+=c.value/100,e.a=d(e.a);return b(e)},fadeout:function(a,c){var e=a.toHSL();e.a-=c.value/100,e.a=d(e.a);return b(e)},spin:function(a,c){var d=a.toHSL(),e=(d.h+c.value)%360;d.h=e<0?360+e:e;return b(d)},mix:function(b,c,d){var e=d.value/100,f=e*2-1,g=b.toHSL().a-c.toHSL().a,h=((f*g==-1?f:(f+g)/(1+f*g))+1)/2,i=1-h,j=[b.rgb[0]*h+c.rgb[0]*i,b.rgb[1]*h+c.rgb[1]*i,b.rgb[2]*h+c.rgb[2]*i],k=b.alpha*e+c.alpha*(1-e);return new a.Color(j,k)},greyscale:function(b){return this.desaturate(b,new a.Dimension(100))},e:function(b){return new a.Anonymous(b instanceof a.JavaScript?b.evaluated:b)},escape:function(b){return new a.Anonymous(encodeURI(b.value).replace(/=/g,"%3D").replace(/:/g,"%3A").replace(/#/g,"%23").replace(/;/g,"%3B").replace(/\(/g,"%28").replace(/\)/g,"%29"))},"%":function(b){var c=Array.prototype.slice.call(arguments,1),d=b.value;for(var e=0;e255?255:a<0?0:a).toString(16);return a.length===1?"0"+a:a}).join("")},operate:function(b,c){var d=[];c instanceof a.Color||(c=c.toColor());for(var e=0;e<3;e++)d[e]=a.operate(b,this.rgb[e],c.rgb[e]);return new 
a.Color(d,this.alpha+c.alpha)},toHSL:function(){var a=this.rgb[0]/255,b=this.rgb[1]/255,c=this.rgb[2]/255,d=this.alpha,e=Math.max(a,b,c),f=Math.min(a,b,c),g,h,i=(e+f)/2,j=e-f;if(e===f)g=h=0;else{h=i>.5?j/(2-e-f):j/(e+f);switch(e){case a:g=(b-c)/j+(b":a.compress?">":" > "}[this.value]}}(c("less/tree")),function(a){a.Expression=function(a){this.value=a},a.Expression.prototype={eval:function(b){return this.value.length>1?new a.Expression(this.value.map(function(a){return a.eval(b)})):this.value.length===1?this.value[0].eval(b):this},toCSS:function(a){return this.value.map(function(b){return b.toCSS(a)}).join(" ")}}}(c("less/tree")),function(a){a.Import=function(b,c){var d=this;this._path=b,b instanceof a.Quoted?this.path=/\.(le?|c)ss$/.test(b.value)?b.value:b.value+".less":this.path=b.value.value||b.value,this.css=/css$/.test(this.path),this.css||c.push(this.path,function(a){if(!a)throw new Error("Error parsing "+d.path);d.root=a})},a.Import.prototype={toCSS:function(){return this.css?"@import "+this._path.toCSS()+";\n":""},eval:function(b){var c;if(this.css)return this;c=new a.Ruleset(null,this.root.rules.slice(0));for(var d=0;d0){c=this.arguments&&this.arguments.map(function(b){return b.eval(a)});for(var g=0;g0&&c>this.params.length)return!1;d=Math.min(c,this.arity);for(var e=0;e1?Array.prototype.push.apply(d,e.find(new a.Selector(b.elements.slice(1)),c)):d.push(e);break}});return this._lookups[g]=d},toCSS:function(b,c){var d=[],e=[],f=[],g=[],h,i;if(!this.root)if(b.length===0)g=this.selectors.map(function(a){return[a]});else for(var j=0;j0&&(h=g.map(function(a){return a.map(function(a){return a.toCSS(c)}).join("").trim()}).join(c.compress?",":g.length>3?",\n":", "),d.push(h,(c.compress?"{":" {\n ")+e.join(c.compress?"":"\n ")+(c.compress?"}":"\n}\n"))),d.push(f);return d.join("")+(c.compress?"\n":"")}}}(c("less/tree")),function(a){a.Selector=function(a){this.elements=a,this.elements[0].combinator.value===""&&(this.elements[0].combinator.value=" 
")},a.Selector.prototype.match=function(a){return this.elements[0].value===a.elements[0].value?!0:!1},a.Selector.prototype.toCSS=function(a){if(this._css)return this._css;return this._css=this.elements.map(function(b){return typeof b=="string"?" "+b.trim():b.toCSS(a)}).join("")}}(c("less/tree")),function(b){b.URL=function(b,c){b.data?this.attrs=b:(!/^(?:https?:\/|file:\/|data:\/)?\//.test(b.value)&&c.length>0&&typeof a!="undefined"&&(b.value=c[0]+(b.value.charAt(0)==="/"?b.value.slice(1):b.value)),this.value=b,this.paths=c)},b.URL.prototype={toCSS:function(){return"url("+(this.attrs?"data:"+this.attrs.mime+this.attrs.charset+this.attrs.base64+this.attrs.data:this.value.toCSS())+")"},eval:function(a){return this.attrs?this:new b.URL(this.value.eval(a),this.paths)}}}(c("less/tree")),function(a){a.Value=function(a){this.value=a,this.is="value"},a.Value.prototype={eval:function(b){return this.value.length===1?this.value[0].eval(b):new a.Value(this.value.map(function(a){return a.eval(b)}))},toCSS:function(a){return this.value.map(function(b){return b.toCSS(a)}).join(a.compress?",":", ")}}}(c("less/tree")),function(a){a.Variable=function(a,b){this.name=a,this.index=b},a.Variable.prototype={eval:function(b){var c,d,e=this.name;e.indexOf("@@")==0&&(e="@"+(new a.Variable(e.slice(1))).eval(b).value);if(c=a.find(b.frames,function(a){ +if(d=a.variable(e))return d.value.eval(b)}))return c;throw{message:"variable "+e+" is undefined",index:this.index}}}}(c("less/tree")),c("less/tree").find=function(a,b){for(var c=0,d;c1?"["+a.value.map(function(a){return a.toCSS(!1)}).join(", ")+"]":a.toCSS(!1)};var g=location.protocol==="file:"||location.protocol==="chrome:"||location.protocol==="chrome-extension:"||location.protocol==="resource:";d.env=d.env||(location.hostname=="127.0.0.1"||location.hostname=="0.0.0.0"||location.hostname=="localhost"||location.port.length>0||g?"development":"production"),d.async=!1,d.poll=d.poll||(g?1e3:1500),d.watch=function(){return 
this.watchMode=!0},d.unwatch=function(){return this.watchMode=!1},d.env==="development"?(d.optimization=0,/!watch/.test(location.hash)&&d.watch(),d.watchTimer=setInterval(function(){d.watchMode&&n(function(a,b,c){a&&q(a.toCSS(),b,c.lastModified)})},d.poll)):d.optimization=3;var h;try{h=typeof a.localStorage=="undefined"?null:a.localStorage}catch(i){h=null}var j=document.getElementsByTagName("link"),k=/^text\/(x-)?less$/;d.sheets=[];for(var l=0;l>> 0; + for (var i = 0; i < len; i++) { + if (i in this) { + block.call(thisObject, this[i], i, this); + } + } + }; +} +if (!Array.prototype.map) { + Array.prototype.map = function(fun /*, thisp*/) { + var len = this.length >>> 0; + var res = new Array(len); + var thisp = arguments[1]; + + for (var i = 0; i < len; i++) { + if (i in this) { + res[i] = fun.call(thisp, this[i], i, this); + } + } + return res; + }; +} +if (!Array.prototype.filter) { + Array.prototype.filter = function (block /*, thisp */) { + var values = []; + var thisp = arguments[1]; + for (var i = 0; i < this.length; i++) { + if (block.call(thisp, this[i])) { + values.push(this[i]); + } + } + return values; + }; +} +if (!Array.prototype.reduce) { + Array.prototype.reduce = function(fun /*, initial*/) { + var len = this.length >>> 0; + var i = 0; + + // no value to return if no initial value and an empty array + if (len === 0 && arguments.length === 1) throw new TypeError(); + + if (arguments.length >= 2) { + var rv = arguments[1]; + } else { + do { + if (i in this) { + rv = this[i++]; + break; + } + // if array contains no values, no initial value to return + if (++i >= len) throw new TypeError(); + } while (true); + } + for (; i < len; i++) { + if (i in this) { + rv = fun.call(null, rv, this[i], i, this); + } + } + return rv; + }; +} +if (!Array.prototype.indexOf) { + Array.prototype.indexOf = function (value /*, fromIndex */ ) { + var length = this.length; + var i = arguments[1] || 0; + + if (!length) return -1; + if (i >= length) return -1; + if (i < 0) i 
+= length; + + for (; i < length; i++) { + if (!Object.prototype.hasOwnProperty.call(this, i)) { continue } + if (value === this[i]) return i; + } + return -1; + }; +} + +// +// Object +// +if (!Object.keys) { + Object.keys = function (object) { + var keys = []; + for (var name in object) { + if (Object.prototype.hasOwnProperty.call(object, name)) { + keys.push(name); + } + } + return keys; + }; +} + +// +// String +// +if (!String.prototype.trim) { + String.prototype.trim = function () { + return String(this).replace(/^\s\s*/, '').replace(/\s\s*$/, ''); + }; +} +var less, tree; + +if (typeof(window) === 'undefined') { + less = exports, + tree = require('less/tree'); +} else { + if (typeof(window.less) === 'undefined') { window.less = {} } + less = window.less, + tree = window.less.tree = {}; +} +// +// less.js - parser +// +// A relatively straight-forward predictive parser. +// There is no tokenization/lexing stage, the input is parsed +// in one sweep. +// +// To make the parser fast enough to run in the browser, several +// optimization had to be made: +// +// - Matching and slicing on a huge input is often cause of slowdowns. +// The solution is to chunkify the input into smaller strings. +// The chunks are stored in the `chunks` var, +// `j` holds the current chunk index, and `current` holds +// the index of the current chunk in relation to `input`. +// This gives us an almost 4x speed-up. +// +// - In many cases, we don't need to match individual tokens; +// for example, if a value doesn't hold any variables, operations +// or dynamic references, the parser can effectively 'skip' it, +// treating it as a literal. +// An example would be '1px solid #000' - which evaluates to itself, +// we don't need to know what the individual components are. +// The drawback, of course is that you don't get the benefits of +// syntax-checking on the CSS. This gives us a 50% speed-up in the parser, +// and a smaller speed-up in the code-gen. 
+// +// +// Token matching is done with the `$` function, which either takes +// a terminal string or regexp, or a non-terminal function to call. +// It also takes care of moving all the indices forwards. +// +// +less.Parser = function Parser(env) { + var input, // LeSS input string + i, // current index in `input` + j, // current chunk + temp, // temporarily holds a chunk's state, for backtracking + memo, // temporarily holds `i`, when backtracking + furthest, // furthest index the parser has gone to + chunks, // chunkified input + current, // index of current chunk, in `input` + parser; + + var that = this; + + // This function is called after all files + // have been imported through `@import`. + var finish = function () {}; + + var imports = this.imports = { + paths: env && env.paths || [], // Search paths, when importing + queue: [], // Files which haven't been imported yet + files: {}, // Holds the imported parse trees + mime: env && env.mime, // MIME type of .less files + push: function (path, callback) { + var that = this; + this.queue.push(path); + + // + // Import a file asynchronously + // + less.Parser.importer(path, this.paths, function (root) { + that.queue.splice(that.queue.indexOf(path), 1); // Remove the path from the queue + that.files[path] = root; // Store the root + + callback(root); + + if (that.queue.length === 0) { finish() } // Call `finish` if we're done importing + }, env); + } + }; + + function save() { temp = chunks[j], memo = i, current = i } + function restore() { chunks[j] = temp, i = memo, current = i } + + function sync() { + if (i > current) { + chunks[j] = chunks[j].slice(i - current); + current = i; + } + } + // + // Parse from a token, regexp or string, and move forward if match + // + function $(tok) { + var match, args, length, c, index, endIndex, k, mem; + + // + // Non-terminal + // + if (tok instanceof Function) { + return tok.call(parser.parsers); + // + // Terminal + // + // Either match a single character in the input, 
+ // or match a regexp in the current chunk (chunk[j]). + // + } else if (typeof(tok) === 'string') { + match = input.charAt(i) === tok ? tok : null; + length = 1; + sync (); + } else { + sync (); + + if (match = tok.exec(chunks[j])) { + length = match[0].length; + } else { + return null; + } + } + + // The match is confirmed, add the match length to `i`, + // and consume any extra white-space characters (' ' || '\n') + // which come after that. The reason for this is that LeSS's + // grammar is mostly white-space insensitive. + // + if (match) { + mem = i += length; + endIndex = i + chunks[j].length - length; + + while (i < endIndex) { + c = input.charCodeAt(i); + if (! (c === 32 || c === 10 || c === 9)) { break } + i++; + } + chunks[j] = chunks[j].slice(length + (i - mem)); + current = i; + + if (chunks[j].length === 0 && j < chunks.length - 1) { j++ } + + if(typeof(match) === 'string') { + return match; + } else { + return match.length === 1 ? match[0] : match; + } + } + } + + // Same as $(), but don't change the state of the parser, + // just return the match. + function peek(tok) { + if (typeof(tok) === 'string') { + return input.charAt(i) === tok; + } else { + if (tok.test(chunks[j])) { + return true; + } else { + return false; + } + } + } + + this.env = env = env || {}; + + // The optimization level dictates the thoroughness of the parser, + // the lower the number, the less nodes it will create in the tree. + // This could matter for debugging, or if you want to access + // the individual nodes in the tree. + this.optimization = ('optimization' in this.env) ? this.env.optimization : 1; + + this.env.filename = this.env.filename || null; + + // + // The Parser + // + return parser = { + + imports: imports, + // + // Parse an input string into an abstract syntax tree, + // call `callback` when done. 
+ // + parse: function (str, callback) { + var root, start, end, zone, line, lines, buff = [], c, error = null; + + i = j = current = furthest = 0; + chunks = []; + input = str.replace(/\r\n/g, '\n'); + + // Split the input into chunks. + chunks = (function (chunks) { + var j = 0, + skip = /[^"'`\{\}\/\(\)]+/g, + comment = /\/\*(?:[^*]|\*+[^\/*])*\*+\/|\/\/.*/g, + level = 0, + match, + chunk = chunks[0], + inParam, + inString; + + for (var i = 0, c, cc; i < input.length; i++) { + skip.lastIndex = i; + if (match = skip.exec(input)) { + if (match.index === i) { + i += match[0].length; + chunk.push(match[0]); + } + } + c = input.charAt(i); + comment.lastIndex = i; + + if (!inString && !inParam && c === '/') { + cc = input.charAt(i + 1); + if (cc === '/' || cc === '*') { + if (match = comment.exec(input)) { + if (match.index === i) { + i += match[0].length; + chunk.push(match[0]); + c = input.charAt(i); + } + } + } + } + + if (c === '{' && !inString && !inParam) { level ++; + chunk.push(c); + } else if (c === '}' && !inString && !inParam) { level --; + chunk.push(c); + chunks[++j] = chunk = []; + } else if (c === '(' && !inString && !inParam) { + chunk.push(c); + inParam = true; + } else if (c === ')' && !inString && inParam) { + chunk.push(c); + inParam = false; + } else { + if (c === '"' || c === "'" || c === '`') { + if (! inString) { + inString = c; + } else { + inString = inString === c ? false : inString; + } + } + chunk.push(c); + } + } + if (level > 0) { + throw { + type: 'Syntax', + message: "Missing closing `}`", + filename: env.filename + }; + } + + return chunks.map(function (c) { return c.join('') });; + })([[]]); + + // Start with the primary rule. + // The whole syntax tree is held under a Ruleset node, + // with the `root` property set to true, so no `{}` are + // output. The callback is called when the input is parsed. 
+ root = new(tree.Ruleset)([], $(this.parsers.primary)); + root.root = true; + + root.toCSS = (function (evaluate) { + var line, lines, column; + + return function (options, variables) { + var frames = []; + + options = options || {}; + // + // Allows setting variables with a hash, so: + // + // `{ color: new(tree.Color)('#f01') }` will become: + // + // new(tree.Rule)('@color', + // new(tree.Value)([ + // new(tree.Expression)([ + // new(tree.Color)('#f01') + // ]) + // ]) + // ) + // + if (typeof(variables) === 'object' && !Array.isArray(variables)) { + variables = Object.keys(variables).map(function (k) { + var value = variables[k]; + + if (! (value instanceof tree.Value)) { + if (! (value instanceof tree.Expression)) { + value = new(tree.Expression)([value]); + } + value = new(tree.Value)([value]); + } + return new(tree.Rule)('@' + k, value, false, 0); + }); + frames = [new(tree.Ruleset)(null, variables)]; + } + + try { + var css = evaluate.call(this, { frames: frames }) + .toCSS([], { compress: options.compress || false }); + } catch (e) { + lines = input.split('\n'); + line = getLine(e.index); + + for (var n = e.index, column = -1; + n >= 0 && input.charAt(n) !== '\n'; + n--) { column++ } + + throw { + type: e.type, + message: e.message, + filename: env.filename, + index: e.index, + line: typeof(line) === 'number' ? line + 1 : null, + callLine: e.call && (getLine(e.call) + 1), + callExtract: lines[getLine(e.call)], + stack: e.stack, + column: column, + extract: [ + lines[line - 1], + lines[line], + lines[line + 1] + ] + }; + } + if (options.compress) { + return css.replace(/(\s)+/g, "$1"); + } else { + return css; + } + + function getLine(index) { + return index ? (input.slice(0, index).match(/\n/g) || "").length : null; + } + }; + })(root.eval); + + // If `i` is smaller than the `input.length - 1`, + // it means the parser wasn't able to parse the whole + // string, so we've got a parsing error. 
+ // + // We try to extract a \n delimited string, + // showing the line where the parse error occured. + // We split it up into two parts (the part which parsed, + // and the part which didn't), so we can color them differently. + if (i < input.length - 1) { + i = furthest; + lines = input.split('\n'); + line = (input.slice(0, i).match(/\n/g) || "").length + 1; + + for (var n = i, column = -1; n >= 0 && input.charAt(n) !== '\n'; n--) { column++ } + + error = { + name: "ParseError", + message: "Syntax Error on line " + line, + index: i, + filename: env.filename, + line: line, + column: column, + extract: [ + lines[line - 2], + lines[line - 1], + lines[line] + ] + }; + } + + if (this.imports.queue.length > 0) { + finish = function () { callback(error, root) }; + } else { + callback(error, root); + } + }, + + // + // Here in, the parsing rules/functions + // + // The basic structure of the syntax tree generated is as follows: + // + // Ruleset -> Rule -> Value -> Expression -> Entity + // + // Here's some LESS code: + // + // .class { + // color: #fff; + // border: 1px solid #000; + // width: @w + 4px; + // > .child {...} + // } + // + // And here's what the parse tree might look like: + // + // Ruleset (Selector '.class', [ + // Rule ("color", Value ([Expression [Color #fff]])) + // Rule ("border", Value ([Expression [Dimension 1px][Keyword "solid"][Color #000]])) + // Rule ("width", Value ([Expression [Operation "+" [Variable "@w"][Dimension 4px]]])) + // Ruleset (Selector [Element '>', '.child'], [...]) + // ]) + // + // In general, most rules will try to parse a token with the `$()` function, and if the return + // value is truly, will return a new node, of the relevant type. Sometimes, we need to check + // first, before parsing, that's when we use `peek()`. + // + parsers: { + // + // The `primary` rule is the *entry* and *exit* point of the parser. + // The rules here can appear at any level of the parse tree. 
+ // + // The recursive nature of the grammar is an interplay between the `block` + // rule, which represents `{ ... }`, the `ruleset` rule, and this `primary` rule, + // as represented by this simplified grammar: + // + // primary → (ruleset | rule)+ + // ruleset → selector+ block + // block → '{' primary '}' + // + // Only at one point is the primary rule not called from the + // block rule: at the root level. + // + primary: function () { + var node, root = []; + + while ((node = $(this.mixin.definition) || $(this.rule) || $(this.ruleset) || + $(this.mixin.call) || $(this.comment) || $(this.directive)) + || $(/^[\s\n]+/)) { + node && root.push(node); + } + return root; + }, + + // We create a Comment node for CSS comments `/* */`, + // but keep the LeSS comments `//` silent, by just skipping + // over them. + comment: function () { + var comment; + + if (input.charAt(i) !== '/') return; + + if (input.charAt(i + 1) === '/') { + return new(tree.Comment)($(/^\/\/.*/), true); + } else if (comment = $(/^\/\*(?:[^*]|\*+[^\/*])*\*+\/\n?/)) { + return new(tree.Comment)(comment); + } + }, + + // + // Entities are tokens which can be found inside an Expression + // + entities: { + // + // A string, which supports escaping " and ' + // + // "milky way" 'he\'s the one!' + // + quoted: function () { + var str, j = i, e; + + if (input.charAt(j) === '~') { j++, e = true } // Escaped strings + if (input.charAt(j) !== '"' && input.charAt(j) !== "'") return; + + e && $('~'); + + if (str = $(/^"((?:[^"\\\r\n]|\\.)*)"|'((?:[^'\\\r\n]|\\.)*)'/)) { + return new(tree.Quoted)(str[0], str[1] || str[2], e); + } + }, + + // + // A catch-all word, such as: + // + // black border-collapse + // + keyword: function () { + var k; + if (k = $(/^[A-Za-z-]+/)) { return new(tree.Keyword)(k) } + }, + + // + // A function call + // + // rgb(255, 0, 255) + // + // We also try to catch IE's `alpha()`, but let the `alpha` parser + // deal with the details. 
+ // + // The arguments are parsed with the `entities.arguments` parser. + // + call: function () { + var name, args, index = i; + + if (! (name = /^([\w-]+|%)\(/.exec(chunks[j]))) return; + + name = name[1].toLowerCase(); + + if (name === 'url') { return null } + else { i += name.length } + + if (name === 'alpha') { return $(this.alpha) } + + $('('); // Parse the '(' and consume whitespace. + + args = $(this.entities.arguments); + + if (! $(')')) return; + + if (name) { return new(tree.Call)(name, args, index) } + }, + arguments: function () { + var args = [], arg; + + while (arg = $(this.expression)) { + args.push(arg); + if (! $(',')) { break } + } + return args; + }, + literal: function () { + return $(this.entities.dimension) || + $(this.entities.color) || + $(this.entities.quoted); + }, + + // + // Parse url() tokens + // + // We use a specific rule for urls, because they don't really behave like + // standard function calls. The difference is that the argument doesn't have + // to be enclosed within a string, so it can't be parsed as an Expression. + // + url: function () { + var value; + + if (input.charAt(i) !== 'u' || !$(/^url\(/)) return; + value = $(this.entities.quoted) || $(this.entities.variable) || + $(this.entities.dataURI) || $(/^[-\w%@$\/.&=:;#+?~]+/) || ""; + if (! $(')')) throw new(Error)("missing closing ) for url()"); + + return new(tree.URL)((value.value || value.data || value instanceof tree.Variable) + ? value : new(tree.Anonymous)(value), imports.paths); + }, + + dataURI: function () { + var obj; + + if ($(/^data:/)) { + obj = {}; + obj.mime = $(/^[^\/]+\/[^,;)]+/) || ''; + obj.charset = $(/^;\s*charset=[^,;)]+/) || ''; + obj.base64 = $(/^;\s*base64/) || ''; + obj.data = $(/^,\s*[^)]+/); + + if (obj.data) { return obj } + } + }, + + // + // A Variable entity, such as `@fink`, in + // + // width: @fink + 2px + // + // We use a different parser for variable definitions, + // see `parsers.variable`. 
+ // + variable: function () { + var name, index = i; + + if (input.charAt(i) === '@' && (name = $(/^@@?[\w-]+/))) { + return new(tree.Variable)(name, index); + } + }, + + // + // A Hexadecimal color + // + // #4F3C2F + // + // `rgb` and `hsl` colors are parsed through the `entities.call` parser. + // + color: function () { + var rgb; + + if (input.charAt(i) === '#' && (rgb = $(/^#([a-fA-F0-9]{6}|[a-fA-F0-9]{3})/))) { + return new(tree.Color)(rgb[1]); + } + }, + + // + // A Dimension, that is, a number and a unit + // + // 0.5em 95% + // + dimension: function () { + var value, c = input.charCodeAt(i); + if ((c > 57 || c < 45) || c === 47) return; + + if (value = $(/^(-?\d*\.?\d+)(px|%|em|pc|ex|in|deg|s|ms|pt|cm|mm|rad|grad|turn)?/)) { + return new(tree.Dimension)(value[1], value[2]); + } + }, + + // + // JavaScript code to be evaluated + // + // `window.location.href` + // + javascript: function () { + var str, j = i, e; + + if (input.charAt(j) === '~') { j++, e = true } // Escaped strings + if (input.charAt(j) !== '`') { return } + + e && $('~'); + + if (str = $(/^`([^`]*)`/)) { + return new(tree.JavaScript)(str[1], i, e); + } + } + }, + + // + // The variable part of a variable definition. Used in the `rule` parser + // + // @fink: + // + variable: function () { + var name; + + if (input.charAt(i) === '@' && (name = $(/^(@[\w-]+)\s*:/))) { return name[1] } + }, + + // + // A font size/line-height shorthand + // + // small/12px + // + // We need to peek first, or we'll match on keywords and dimensions + // + shorthand: function () { + var a, b; + + if (! 
peek(/^[@\w.%-]+\/[@\w.-]+/)) return; + + if ((a = $(this.entity)) && $('/') && (b = $(this.entity))) { + return new(tree.Shorthand)(a, b); + } + }, + + // + // Mixins + // + mixin: { + // + // A Mixin call, with an optional argument list + // + // #mixins > .square(#fff); + // .rounded(4px, black); + // .button; + // + // The `while` loop is there because mixins can be + // namespaced, but we only support the child and descendant + // selector for now. + // + call: function () { + var elements = [], e, c, args, index = i, s = input.charAt(i); + + if (s !== '.' && s !== '#') { return } + + while (e = $(/^[#.](?:[\w-]|\\(?:[a-fA-F0-9]{1,6} ?|[^a-fA-F0-9]))+/)) { + elements.push(new(tree.Element)(c, e)); + c = $('>'); + } + $('(') && (args = $(this.entities.arguments)) && $(')'); + + if (elements.length > 0 && ($(';') || peek('}'))) { + return new(tree.mixin.Call)(elements, args, index); + } + }, + + // + // A Mixin definition, with a list of parameters + // + // .rounded (@radius: 2px, @color) { + // ... + // } + // + // Until we have a finer grained state-machine, we have to + // do a look-ahead, to make sure we don't have a mixin call. + // See the `rule` function for more information. + // + // We start by matching `.rounded (`, and then proceed on to + // the argument list, which has optional default values. + // We store the parameters in `params`, with a `value` key, + // if there is a value, such as in the case of `@radius`. + // + // Once we've got our params list, and a closing `)`, we parse + // the `{...}` block. + // + definition: function () { + var name, params = [], match, ruleset, param, value; + + if ((input.charAt(i) !== '.' 
&& input.charAt(i) !== '#') || + peek(/^[^{]*(;|})/)) return; + + if (match = $(/^([#.](?:[\w-]|\\(?:[a-fA-F0-9]{1,6} ?|[^a-fA-F0-9]))+)\s*\(/)) { + name = match[1]; + + while (param = $(this.entities.variable) || $(this.entities.literal) + || $(this.entities.keyword)) { + // Variable + if (param instanceof tree.Variable) { + if ($(':')) { + if (value = $(this.expression)) { + params.push({ name: param.name, value: value }); + } else { + throw new(Error)("Expected value"); + } + } else { + params.push({ name: param.name }); + } + } else { + params.push({ value: param }); + } + if (! $(',')) { break } + } + if (! $(')')) throw new(Error)("Expected )"); + + ruleset = $(this.block); + + if (ruleset) { + return new(tree.mixin.Definition)(name, params, ruleset); + } + } + } + }, + + // + // Entities are the smallest recognized token, + // and can be found inside a rule's value. + // + entity: function () { + return $(this.entities.literal) || $(this.entities.variable) || $(this.entities.url) || + $(this.entities.call) || $(this.entities.keyword) || $(this.entities.javascript) || + $(this.comment); + }, + + // + // A Rule terminator. Note that we use `peek()` to check for '}', + // because the `block` rule will be expecting it, but we still need to make sure + // it's there, if ';' was ommitted. + // + end: function () { + return $(';') || peek('}'); + }, + + // + // IE's alpha function + // + // alpha(opacity=88) + // + alpha: function () { + var value; + + if (! $(/^\(opacity=/i)) return; + if (value = $(/^\d+/) || $(this.entities.variable)) { + if (! $(')')) throw new(Error)("missing closing ) for alpha()"); + return new(tree.Alpha)(value); + } + }, + + // + // A Selector Element + // + // div + // + h1 + // #socks + // input[type="text"] + // + // Elements are the building blocks for Selectors, + // they are made out of a `Combinator` (see combinator rule), + // and an element name, such as a tag a class, or `*`. 
+ // + element: function () { + var e, t, c; + + c = $(this.combinator); + e = $(/^(?:[.#]?|:*)(?:[\w-]|\\(?:[a-fA-F0-9]{1,6} ?|[^a-fA-F0-9]))+/) || $('*') || $(this.attribute) || $(/^\([^)@]+\)/); + + if (e) { return new(tree.Element)(c, e) } + }, + + // + // Combinators combine elements together, in a Selector. + // + // Because our parser isn't white-space sensitive, special care + // has to be taken, when parsing the descendant combinator, ` `, + // as it's an empty space. We have to check the previous character + // in the input, to see if it's a ` ` character. More info on how + // we deal with this in *combinator.js*. + // + combinator: function () { + var match, c = input.charAt(i); + + if (c === '>' || c === '&' || c === '+' || c === '~') { + i++; + while (input.charAt(i) === ' ') { i++ } + return new(tree.Combinator)(c); + } else if (c === ':' && input.charAt(i + 1) === ':') { + i += 2; + while (input.charAt(i) === ' ') { i++ } + return new(tree.Combinator)('::'); + } else if (input.charAt(i - 1) === ' ') { + return new(tree.Combinator)(" "); + } else { + return new(tree.Combinator)(null); + } + }, + + // + // A CSS Selector + // + // .class > div + h1 + // li a:hover + // + // Selectors are made out of one or more Elements, see above. + // + selector: function () { + var sel, e, elements = [], c, match; + + while (e = $(this.element)) { + c = input.charAt(i); + elements.push(e) + if (c === '{' || c === '}' || c === ';' || c === ',') { break } + } + + if (elements.length > 0) { return new(tree.Selector)(elements) } + }, + tag: function () { + return $(/^[a-zA-Z][a-zA-Z-]*[0-9]?/) || $('*'); + }, + attribute: function () { + var attr = '', key, val, op; + + if (! $('[')) return; + + if (key = $(/^[a-zA-Z-]+/) || $(this.entities.quoted)) { + if ((op = $(/^[|~*$^]?=/)) && + (val = $(this.entities.quoted) || $(/^[\w-]+/))) { + attr = [key, op, val.toCSS ? val.toCSS() : val].join(''); + } else { attr = key } + } + + if (! 
$(']')) return; + + if (attr) { return "[" + attr + "]" } + }, + + // + // The `block` rule is used by `ruleset` and `mixin.definition`. + // It's a wrapper around the `primary` rule, with added `{}`. + // + block: function () { + var content; + + if ($('{') && (content = $(this.primary)) && $('}')) { + return content; + } + }, + + // + // div, .class, body > p {...} + // + ruleset: function () { + var selectors = [], s, rules, match; + save(); + + if (match = /^([.#: \w-]+)[\s\n]*\{/.exec(chunks[j])) { + i += match[0].length - 1; + selectors = [new(tree.Selector)([new(tree.Element)(null, match[1])])]; + } else { + while (s = $(this.selector)) { + selectors.push(s); + $(this.comment); + if (! $(',')) { break } + $(this.comment); + } + } + + if (selectors.length > 0 && (rules = $(this.block))) { + return new(tree.Ruleset)(selectors, rules); + } else { + // Backtrack + furthest = i; + restore(); + } + }, + rule: function () { + var name, value, c = input.charAt(i), important, match; + save(); + + if (c === '.' || c === '#' || c === '&') { return } + + if (name = $(this.variable) || $(this.property)) { + if ((name.charAt(0) != '@') && (match = /^([^@+\/'"*`(;{}-]*);/.exec(chunks[j]))) { + i += match[0].length - 1; + value = new(tree.Anonymous)(match[1]); + } else if (name === "font") { + value = $(this.font); + } else { + value = $(this.value); + } + important = $(this.important); + + if (value && $(this.end)) { + return new(tree.Rule)(name, value, important, memo); + } else { + furthest = i; + restore(); + } + } + }, + + // + // An @import directive + // + // @import "lib"; + // + // Depending on our environemnt, importing is done differently: + // In the browser, it's an XHR request, in Node, it would be a + // file-system operation. The function used for importing is + // stored in `import`, which we pass to the Import constructor. 
+ // + "import": function () { + var path; + if ($(/^@import\s+/) && + (path = $(this.entities.quoted) || $(this.entities.url)) && + $(';')) { + return new(tree.Import)(path, imports); + } + }, + + // + // A CSS Directive + // + // @charset "utf-8"; + // + directive: function () { + var name, value, rules, types; + + if (input.charAt(i) !== '@') return; + + if (value = $(this['import'])) { + return value; + } else if (name = $(/^@media|@page|@-[-a-z]+/)) { + types = ($(/^[^{]+/) || '').trim(); + if (rules = $(this.block)) { + return new(tree.Directive)(name + " " + types, rules); + } + } else if (name = $(/^@[-a-z]+/)) { + if (name === '@font-face') { + if (rules = $(this.block)) { + return new(tree.Directive)(name, rules); + } + } else if ((value = $(this.entity)) && $(';')) { + return new(tree.Directive)(name, value); + } + } + }, + font: function () { + var value = [], expression = [], weight, shorthand, font, e; + + while (e = $(this.shorthand) || $(this.entity)) { + expression.push(e); + } + value.push(new(tree.Expression)(expression)); + + if ($(',')) { + while (e = $(this.expression)) { + value.push(e); + if (! $(',')) { break } + } + } + return new(tree.Value)(value); + }, + + // + // A Value is a comma-delimited list of Expressions + // + // font-family: Baskerville, Georgia, serif; + // + // In a Rule, a Value represents everything after the `:`, + // and before the `;`. + // + value: function () { + var e, expressions = [], important; + + while (e = $(this.expression)) { + expressions.push(e); + if (! $(',')) { break } + } + + if (expressions.length > 0) { + return new(tree.Value)(expressions); + } + }, + important: function () { + if (input.charAt(i) === '!') { + return $(/^! 
*important/); + } + }, + sub: function () { + var e; + + if ($('(') && (e = $(this.expression)) && $(')')) { + return e; + } + }, + multiplication: function () { + var m, a, op, operation; + if (m = $(this.operand)) { + while ((op = ($('/') || $('*'))) && (a = $(this.operand))) { + operation = new(tree.Operation)(op, [operation || m, a]); + } + return operation || m; + } + }, + addition: function () { + var m, a, op, operation; + if (m = $(this.multiplication)) { + while ((op = $(/^[-+]\s+/) || (input.charAt(i - 1) != ' ' && ($('+') || $('-')))) && + (a = $(this.multiplication))) { + operation = new(tree.Operation)(op, [operation || m, a]); + } + return operation || m; + } + }, + + // + // An operand is anything that can be part of an operation, + // such as a Color, or a Variable + // + operand: function () { + var negate, p = input.charAt(i + 1); + + if (input.charAt(i) === '-' && (p === '@' || p === '(')) { negate = $('-') } + var o = $(this.sub) || $(this.entities.dimension) || + $(this.entities.color) || $(this.entities.variable) || + $(this.entities.call); + return negate ? new(tree.Operation)('*', [new(tree.Dimension)(-1), o]) + : o; + }, + + // + // Expressions either represent mathematical operations, + // or white-space delimited Entities. + // + // 1px solid black + // @var * 2 + // + expression: function () { + var e, delim, entities = [], d; + + while (e = $(this.addition) || $(this.entity)) { + entities.push(e); + } + if (entities.length > 0) { + return new(tree.Expression)(entities); + } + }, + property: function () { + var name; + + if (name = $(/^(\*?-?[-a-z_0-9]+)\s*:/)) { + return name[1]; + } + } + } + }; +}; + +if (typeof(window) !== 'undefined') { + // + // Used by `@import` directives + // + less.Parser.importer = function (path, paths, callback, env) { + if (path.charAt(0) !== '/' && paths.length > 0) { + path = paths[0] + path; + } + // We pass `true` as 3rd argument, to force the reload of the import. 
+ // This is so we can get the syntax tree as opposed to just the CSS output, + // as we need this to evaluate the current stylesheet. + loadStyleSheet({ href: path, title: path, type: env.mime }, callback, true); + }; +} + +(function (tree) { + +tree.functions = { + rgb: function (r, g, b) { + return this.rgba(r, g, b, 1.0); + }, + rgba: function (r, g, b, a) { + var rgb = [r, g, b].map(function (c) { return number(c) }), + a = number(a); + return new(tree.Color)(rgb, a); + }, + hsl: function (h, s, l) { + return this.hsla(h, s, l, 1.0); + }, + hsla: function (h, s, l, a) { + h = (number(h) % 360) / 360; + s = number(s); l = number(l); a = number(a); + + var m2 = l <= 0.5 ? l * (s + 1) : l + s - l * s; + var m1 = l * 2 - m2; + + return this.rgba(hue(h + 1/3) * 255, + hue(h) * 255, + hue(h - 1/3) * 255, + a); + + function hue(h) { + h = h < 0 ? h + 1 : (h > 1 ? h - 1 : h); + if (h * 6 < 1) return m1 + (m2 - m1) * h * 6; + else if (h * 2 < 1) return m2; + else if (h * 3 < 2) return m1 + (m2 - m1) * (2/3 - h) * 6; + else return m1; + } + }, + hue: function (color) { + return new(tree.Dimension)(Math.round(color.toHSL().h)); + }, + saturation: function (color) { + return new(tree.Dimension)(Math.round(color.toHSL().s * 100), '%'); + }, + lightness: function (color) { + return new(tree.Dimension)(Math.round(color.toHSL().l * 100), '%'); + }, + alpha: function (color) { + return new(tree.Dimension)(color.toHSL().a); + }, + saturate: function (color, amount) { + var hsl = color.toHSL(); + + hsl.s += amount.value / 100; + hsl.s = clamp(hsl.s); + return hsla(hsl); + }, + desaturate: function (color, amount) { + var hsl = color.toHSL(); + + hsl.s -= amount.value / 100; + hsl.s = clamp(hsl.s); + return hsla(hsl); + }, + lighten: function (color, amount) { + var hsl = color.toHSL(); + + hsl.l += amount.value / 100; + hsl.l = clamp(hsl.l); + return hsla(hsl); + }, + darken: function (color, amount) { + var hsl = color.toHSL(); + + hsl.l -= amount.value / 100; + hsl.l = 
clamp(hsl.l); + return hsla(hsl); + }, + fadein: function (color, amount) { + var hsl = color.toHSL(); + + hsl.a += amount.value / 100; + hsl.a = clamp(hsl.a); + return hsla(hsl); + }, + fadeout: function (color, amount) { + var hsl = color.toHSL(); + + hsl.a -= amount.value / 100; + hsl.a = clamp(hsl.a); + return hsla(hsl); + }, + spin: function (color, amount) { + var hsl = color.toHSL(); + var hue = (hsl.h + amount.value) % 360; + + hsl.h = hue < 0 ? 360 + hue : hue; + + return hsla(hsl); + }, + // + // Copyright (c) 2006-2009 Hampton Catlin, Nathan Weizenbaum, and Chris Eppstein + // http://sass-lang.com + // + mix: function (color1, color2, weight) { + var p = weight.value / 100.0; + var w = p * 2 - 1; + var a = color1.toHSL().a - color2.toHSL().a; + + var w1 = (((w * a == -1) ? w : (w + a) / (1 + w * a)) + 1) / 2.0; + var w2 = 1 - w1; + + var rgb = [color1.rgb[0] * w1 + color2.rgb[0] * w2, + color1.rgb[1] * w1 + color2.rgb[1] * w2, + color1.rgb[2] * w1 + color2.rgb[2] * w2]; + + var alpha = color1.alpha * p + color2.alpha * (1 - p); + + return new(tree.Color)(rgb, alpha); + }, + greyscale: function (color) { + return this.desaturate(color, new(tree.Dimension)(100)); + }, + e: function (str) { + return new(tree.Anonymous)(str instanceof tree.JavaScript ? str.evaluated : str); + }, + escape: function (str) { + return new(tree.Anonymous)(encodeURI(str.value).replace(/=/g, "%3D").replace(/:/g, "%3A").replace(/#/g, "%23").replace(/;/g, "%3B").replace(/\(/g, "%28").replace(/\)/g, "%29")); + }, + '%': function (quoted /* arg, arg, ...*/) { + var args = Array.prototype.slice.call(arguments, 1), + str = quoted.value; + + for (var i = 0; i < args.length; i++) { + str = str.replace(/%[sda]/i, function(token) { + var value = token.match(/s/i) ? args[i].value : args[i].toCSS(); + return token.match(/[A-Z]$/) ? 
encodeURIComponent(value) : value; + }); + } + str = str.replace(/%%/g, '%'); + return new(tree.Quoted)('"' + str + '"', str); + }, + round: function (n) { + if (n instanceof tree.Dimension) { + return new(tree.Dimension)(Math.round(number(n)), n.unit); + } else if (typeof(n) === 'number') { + return Math.round(n); + } else { + throw { + error: "RuntimeError", + message: "math functions take numbers as parameters" + }; + } + } +}; + +function hsla(hsla) { + return tree.functions.hsla(hsla.h, hsla.s, hsla.l, hsla.a); +} + +function number(n) { + if (n instanceof tree.Dimension) { + return parseFloat(n.unit == '%' ? n.value / 100 : n.value); + } else if (typeof(n) === 'number') { + return n; + } else { + throw { + error: "RuntimeError", + message: "color functions take numbers as parameters" + }; + } +} + +function clamp(val) { + return Math.min(1, Math.max(0, val)); +} + +})(require('less/tree')); +(function (tree) { + +tree.Alpha = function (val) { + this.value = val; +}; +tree.Alpha.prototype = { + toCSS: function () { + return "alpha(opacity=" + + (this.value.toCSS ? this.value.toCSS() : this.value) + ")"; + }, + eval: function (env) { + if (this.value.eval) { this.value = this.value.eval(env) } + return this; + } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Anonymous = function (string) { + this.value = string.value || string; +}; +tree.Anonymous.prototype = { + toCSS: function () { + return this.value; + }, + eval: function () { return this } +}; + +})(require('less/tree')); +(function (tree) { + +// +// A function call node. +// +tree.Call = function (name, args, index) { + this.name = name; + this.args = args; + this.index = index; +}; +tree.Call.prototype = { + // + // When evaluating a function call, + // we either find the function in `tree.functions` [1], + // in which case we call it, passing the evaluated arguments, + // or we simply print it out as it appeared originally [2]. 
+ // + // The *functions.js* file contains the built-in functions. + // + // The reason why we evaluate the arguments, is in the case where + // we try to pass a variable to a function, like: `saturate(@color)`. + // The function should receive the value, not the variable. + // + eval: function (env) { + var args = this.args.map(function (a) { return a.eval(env) }); + + if (this.name in tree.functions) { // 1. + try { + return tree.functions[this.name].apply(tree.functions, args); + } catch (e) { + throw { message: "error evaluating function `" + this.name + "`", + index: this.index }; + } + } else { // 2. + return new(tree.Anonymous)(this.name + + "(" + args.map(function (a) { return a.toCSS() }).join(', ') + ")"); + } + }, + + toCSS: function (env) { + return this.eval(env).toCSS(); + } +}; + +})(require('less/tree')); +(function (tree) { +// +// RGB Colors - #ff0014, #eee +// +tree.Color = function (rgb, a) { + // + // The end goal here, is to parse the arguments + // into an integer triplet, such as `128, 255, 0` + // + // This facilitates operations and conversions. + // + if (Array.isArray(rgb)) { + this.rgb = rgb; + } else if (rgb.length == 6) { + this.rgb = rgb.match(/.{2}/g).map(function (c) { + return parseInt(c, 16); + }); + } else if (rgb.length == 8) { + this.alpha = parseInt(rgb.substring(0,2), 16) / 255.0; + this.rgb = rgb.substr(2).match(/.{2}/g).map(function (c) { + return parseInt(c, 16); + }); + } else { + this.rgb = rgb.split('').map(function (c) { + return parseInt(c + c, 16); + }); + } + this.alpha = typeof(a) === 'number' ? a : 1; +}; +tree.Color.prototype = { + eval: function () { return this }, + + // + // If we have some transparency, the only way to represent it + // is via `rgba`. Otherwise, we use the hex representation, + // which has better compatibility with older browsers. + // Values are capped between `0` and `255`, rounded and zero-padded. 
+    //
+    toCSS: function () {
+        // Any transparency forces the rgba() form; otherwise emit hex,
+        // which has better compatibility with older browsers.
+        if (this.alpha < 1.0) {
+            return "rgba(" + this.rgb.map(function (c) {
+                return Math.round(c);
+            }).concat(this.alpha).join(', ') + ")";
+        } else {
+            // Round, clamp to [0, 255], and zero-pad each channel to two hex digits.
+            return '#' + this.rgb.map(function (i) {
+                i = Math.round(i);
+                i = (i > 255 ? 255 : (i < 0 ? 0 : i)).toString(16);
+                return i.length === 1 ? '0' + i : i;
+            }).join('');
+        }
+    },
+
+    //
+    // Operations have to be done per-channel, if not,
+    // channels will spill onto each other. Once we have
+    // our result, in the form of an integer triplet,
+    // we create a new Color node to hold the result.
+    //
+    operate: function (op, other) {
+        var result = [];
+
+        // Coerce the other operand (e.g. a Dimension) into a Color first.
+        if (! (other instanceof tree.Color)) {
+            other = other.toColor();
+        }
+
+        for (var c = 0; c < 3; c++) {
+            result[c] = tree.operate(op, this.rgb[c], other.rgb[c]);
+        }
+        return new(tree.Color)(result, this.alpha + other.alpha);
+    },
+
+    // Convert this color to { h, s, l, a } with h in degrees and
+    // s/l/a as fractions in [0, 1].
+    toHSL: function () {
+        var r = this.rgb[0] / 255,
+            g = this.rgb[1] / 255,
+            b = this.rgb[2] / 255,
+            a = this.alpha;
+
+        var max = Math.max(r, g, b), min = Math.min(r, g, b);
+        var h, s, l = (max + min) / 2, d = max - min;
+
+        if (max === min) {
+            // Achromatic: hue and saturation are zero by convention.
+            h = s = 0;
+        } else {
+            s = l > 0.5 ? d / (2 - max - min) : d / (max + min);
+
+            // Hue sector depends on which channel dominates.
+            switch (max) {
+                case r: h = (g - b) / d + (g < b ? 6 : 0); break;
+                case g: h = (b - r) / d + 2;               break;
+                case b: h = (r - g) / d + 4;               break;
+            }
+            h /= 6;
+        }
+        return { h: h * 360, s: s, l: l, a: a };
+    }
+};
+
+
+})(require('less/tree'));
+(function (tree) {
+
+// A CSS comment node. `silent` comments (LESS `//` style) are kept in the
+// tree but produce no output.
+tree.Comment = function (value, silent) {
+    this.value = value;
+    this.silent = !!silent;
+};
+tree.Comment.prototype = {
+    toCSS: function (env) {
+        return env.compress ?
'' : this.value; + }, + eval: function () { return this } +}; + +})(require('less/tree')); +(function (tree) { + +// +// A number with a unit +// +tree.Dimension = function (value, unit) { + this.value = parseFloat(value); + this.unit = unit || null; +}; + +tree.Dimension.prototype = { + eval: function () { return this }, + toColor: function () { + return new(tree.Color)([this.value, this.value, this.value]); + }, + toCSS: function () { + var css = this.value + this.unit; + return css; + }, + + // In an operation between two Dimensions, + // we default to the first Dimension's unit, + // so `1px + 2em` will yield `3px`. + // In the future, we could implement some unit + // conversions such that `100cm + 10mm` would yield + // `101cm`. + operate: function (op, other) { + return new(tree.Dimension) + (tree.operate(op, this.value, other.value), + this.unit || other.unit); + } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Directive = function (name, value) { + this.name = name; + if (Array.isArray(value)) { + this.ruleset = new(tree.Ruleset)([], value); + } else { + this.value = value; + } +}; +tree.Directive.prototype = { + toCSS: function (ctx, env) { + if (this.ruleset) { + this.ruleset.root = true; + return this.name + (env.compress ? '{' : ' {\n ') + + this.ruleset.toCSS(ctx, env).trim().replace(/\n/g, '\n ') + + (env.compress ? 
'}': '\n}\n'); + } else { + return this.name + ' ' + this.value.toCSS() + ';\n'; + } + }, + eval: function (env) { + env.frames.unshift(this); + this.ruleset = this.ruleset && this.ruleset.eval(env); + env.frames.shift(); + return this; + }, + variable: function (name) { return tree.Ruleset.prototype.variable.call(this.ruleset, name) }, + find: function () { return tree.Ruleset.prototype.find.apply(this.ruleset, arguments) }, + rulesets: function () { return tree.Ruleset.prototype.rulesets.apply(this.ruleset) } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Element = function (combinator, value) { + this.combinator = combinator instanceof tree.Combinator ? + combinator : new(tree.Combinator)(combinator); + this.value = value.trim(); +}; +tree.Element.prototype.toCSS = function (env) { + return this.combinator.toCSS(env || {}) + this.value; +}; + +tree.Combinator = function (value) { + if (value === ' ') { + this.value = ' '; + } else { + this.value = value ? value.trim() : ""; + } +}; +tree.Combinator.prototype.toCSS = function (env) { + return { + '' : '', + ' ' : ' ', + '&' : '', + ':' : ' :', + '::': '::', + '+' : env.compress ? '+' : ' + ', + '~' : env.compress ? '~' : ' ~ ', + '>' : env.compress ? '>' : ' > ' + }[this.value]; +}; + +})(require('less/tree')); +(function (tree) { + +tree.Expression = function (value) { this.value = value }; +tree.Expression.prototype = { + eval: function (env) { + if (this.value.length > 1) { + return new(tree.Expression)(this.value.map(function (e) { + return e.eval(env); + })); + } else if (this.value.length === 1) { + return this.value[0].eval(env); + } else { + return this; + } + }, + toCSS: function (env) { + return this.value.map(function (e) { + return e.toCSS(env); + }).join(' '); + } +}; + +})(require('less/tree')); +(function (tree) { +// +// CSS @import node +// +// The general strategy here is that we don't want to wait +// for the parsing to be completed, before we start importing +// the file. 
That's because in the context of a browser, +// most of the time will be spent waiting for the server to respond. +// +// On creation, we push the import path to our import queue, though +// `import,push`, we also pass it a callback, which it'll call once +// the file has been fetched, and parsed. +// +tree.Import = function (path, imports) { + var that = this; + + this._path = path; + + // The '.less' extension is optional + if (path instanceof tree.Quoted) { + this.path = /\.(le?|c)ss$/.test(path.value) ? path.value : path.value + '.less'; + } else { + this.path = path.value.value || path.value; + } + + this.css = /css$/.test(this.path); + + // Only pre-compile .less files + if (! this.css) { + imports.push(this.path, function (root) { + if (! root) { + throw new(Error)("Error parsing " + that.path); + } + that.root = root; + }); + } +}; + +// +// The actual import node doesn't return anything, when converted to CSS. +// The reason is that it's used at the evaluation stage, so that the rules +// it imports can be treated like any other rules. +// +// In `eval`, we make sure all Import nodes get evaluated, recursively, so +// we end up with a flat structure, which can easily be imported in the parent +// ruleset. 
+//
+tree.Import.prototype = {
+    // A pre-compiled (.less) import emits nothing itself; its rules are
+    // spliced into the parent ruleset during eval. Plain CSS imports are
+    // passed through verbatim.
+    toCSS: function () {
+        if (this.css) {
+            return "@import " + this._path.toCSS() + ';\n';
+        } else {
+            return "";
+        }
+    },
+    eval: function (env) {
+        var ruleset;
+
+        if (this.css) {
+            return this;
+        } else {
+            ruleset = new(tree.Ruleset)(null, this.root.rules.slice(0));
+
+            // Recursively flatten nested Import nodes in place, so the
+            // caller receives a flat list of rules.
+            for (var i = 0; i < ruleset.rules.length; i++) {
+                if (ruleset.rules[i] instanceof tree.Import) {
+                    Array.prototype
+                         .splice
+                         .apply(ruleset.rules,
+                                [i, 1].concat(ruleset.rules[i].eval(env)));
+                }
+            }
+            return ruleset.rules;
+        }
+    }
+};
+
+})(require('less/tree'));
+(function (tree) {
+
+// A back-tick JavaScript expression embedded in a stylesheet.
+tree.JavaScript = function (string, index, escaped) {
+    this.escaped = escaped;
+    this.expression = string;
+    this.index = index;
+};
+tree.JavaScript.prototype = {
+    eval: function (env) {
+        var result,
+            that = this,
+            context = {};
+
+        // Substitute `@{name}` interpolations with the variable's JS value
+        // before compiling the expression.
+        var expression = this.expression.replace(/@\{([\w-]+)\}/g, function (_, name) {
+            return tree.jsify(new(tree.Variable)('@' + name, that.index).eval(env));
+        });
+
+        // NOTE(review): this evaluates arbitrary code from the stylesheet via
+        // the Function constructor — only compile trusted .less sources.
+        try {
+            expression = new(Function)('return (' + expression + ')');
+        } catch (e) {
+            throw { message: "JavaScript evaluation error: `" + expression + "`" ,
+                    index: this.index };
+        }
+
+        // Expose the innermost frame's variables (minus the leading '@')
+        // to the expression as `this.<name>`.
+        for (var k in env.frames[0].variables()) {
+            context[k.slice(1)] = {
+                value: env.frames[0].variables()[k].value,
+                toJS: function () {
+                    return this.value.eval(env).toCSS();
+                }
+            };
+        }
+
+        try {
+            result = expression.call(context);
+        } catch (e) {
+            throw { message: "JavaScript evaluation error: '" + e.name + ': ' + e.message + "'" ,
+                    index: this.index };
+        }
+        // Wrap the raw JS result in the closest matching tree node.
+        if (typeof(result) === 'string') {
+            return new(tree.Quoted)('"' + result + '"', result, this.escaped, this.index);
+        } else if (Array.isArray(result)) {
+            return new(tree.Anonymous)(result.join(', '));
+        } else {
+            return new(tree.Anonymous)(result);
+        }
+    }
+};
+
+})(require('less/tree'));
+
+(function (tree) {
+
+// A bare CSS keyword, e.g. `solid` or `inherit`.
+tree.Keyword = function (value) { this.value = value };
+tree.Keyword.prototype = {
+    eval: function () { return this },
+
toCSS: function () { return this.value } +}; + +})(require('less/tree')); +(function (tree) { + +tree.mixin = {}; +tree.mixin.Call = function (elements, args, index) { + this.selector = new(tree.Selector)(elements); + this.arguments = args; + this.index = index; +}; +tree.mixin.Call.prototype = { + eval: function (env) { + var mixins, args, rules = [], match = false; + + for (var i = 0; i < env.frames.length; i++) { + if ((mixins = env.frames[i].find(this.selector)).length > 0) { + args = this.arguments && this.arguments.map(function (a) { return a.eval(env) }); + for (var m = 0; m < mixins.length; m++) { + if (mixins[m].match(args, env)) { + try { + Array.prototype.push.apply( + rules, mixins[m].eval(env, this.arguments).rules); + match = true; + } catch (e) { + throw { message: e.message, index: e.index, stack: e.stack, call: this.index }; + } + } + } + if (match) { + return rules; + } else { + throw { message: 'No matching definition was found for `' + + this.selector.toCSS().trim() + '(' + + this.arguments.map(function (a) { + return a.toCSS(); + }).join(', ') + ")`", + index: this.index }; + } + } + } + throw { message: this.selector.toCSS().trim() + " is undefined", + index: this.index }; + } +}; + +tree.mixin.Definition = function (name, params, rules) { + this.name = name; + this.selectors = [new(tree.Selector)([new(tree.Element)(null, name)])]; + this.params = params; + this.arity = params.length; + this.rules = rules; + this._lookups = {}; + this.required = params.reduce(function (count, p) { + if (!p.name || (p.name && !p.value)) { return count + 1 } + else { return count } + }, 0); + this.parent = tree.Ruleset.prototype; + this.frames = []; +}; +tree.mixin.Definition.prototype = { + toCSS: function () { return "" }, + variable: function (name) { return this.parent.variable.call(this, name) }, + variables: function () { return this.parent.variables.call(this) }, + find: function () { return this.parent.find.apply(this, arguments) }, + rulesets: function 
() { return this.parent.rulesets.apply(this) }, + + eval: function (env, args) { + var frame = new(tree.Ruleset)(null, []), context, _arguments = []; + + for (var i = 0, val; i < this.params.length; i++) { + if (this.params[i].name) { + if (val = (args && args[i]) || this.params[i].value) { + frame.rules.unshift(new(tree.Rule)(this.params[i].name, val.eval(env))); + } else { + throw { message: "wrong number of arguments for " + this.name + + ' (' + args.length + ' for ' + this.arity + ')' }; + } + } + } + for (var i = 0; i < Math.max(this.params.length, args && args.length); i++) { + _arguments.push(args[i] || this.params[i].value); + } + frame.rules.unshift(new(tree.Rule)('@arguments', new(tree.Expression)(_arguments).eval(env))); + + return new(tree.Ruleset)(null, this.rules.slice(0)).eval({ + frames: [this, frame].concat(this.frames, env.frames) + }); + }, + match: function (args, env) { + var argsLength = (args && args.length) || 0, len; + + if (argsLength < this.required) { return false } + if ((this.required > 0) && (argsLength > this.params.length)) { return false } + + len = Math.min(argsLength, this.arity); + + for (var i = 0; i < len; i++) { + if (!this.params[i].name) { + if (args[i].eval(env).toCSS() != this.params[i].value.eval(env).toCSS()) { + return false; + } + } + } + return true; + } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Operation = function (op, operands) { + this.op = op.trim(); + this.operands = operands; +}; +tree.Operation.prototype.eval = function (env) { + var a = this.operands[0].eval(env), + b = this.operands[1].eval(env), + temp; + + if (a instanceof tree.Dimension && b instanceof tree.Color) { + if (this.op === '*' || this.op === '+') { + temp = b, b = a, a = temp; + } else { + throw { name: "OperationError", + message: "Can't substract or divide a color from a number" }; + } + } + return a.operate(this.op, b); +}; + +tree.operate = function (op, a, b) { + switch (op) { + case '+': return a + b; + case '-': 
return a - b; + case '*': return a * b; + case '/': return a / b; + } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Quoted = function (str, content, escaped, i) { + this.escaped = escaped; + this.value = content || ''; + this.quote = str.charAt(0); + this.index = i; +}; +tree.Quoted.prototype = { + toCSS: function () { + if (this.escaped) { + return this.value; + } else { + return this.quote + this.value + this.quote; + } + }, + eval: function (env) { + var that = this; + var value = this.value.replace(/`([^`]+)`/g, function (_, exp) { + return new(tree.JavaScript)(exp, that.index, true).eval(env).value; + }).replace(/@\{([\w-]+)\}/g, function (_, name) { + var v = new(tree.Variable)('@' + name, that.index).eval(env); + return v.value || v.toCSS(); + }); + return new(tree.Quoted)(this.quote + value + this.quote, value, this.escaped, this.index); + } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Rule = function (name, value, important, index) { + this.name = name; + this.value = (value instanceof tree.Value) ? value : new(tree.Value)([value]); + this.important = important ? ' ' + important.trim() : ''; + this.index = index; + + if (name.charAt(0) === '@') { + this.variable = true; + } else { this.variable = false } +}; +tree.Rule.prototype.toCSS = function (env) { + if (this.variable) { return "" } + else { + return this.name + (env.compress ? 
':' : ': ') + + this.value.toCSS(env) + + this.important + ";"; + } +}; + +tree.Rule.prototype.eval = function (context) { + return new(tree.Rule)(this.name, this.value.eval(context), this.important, this.index); +}; + +tree.Shorthand = function (a, b) { + this.a = a; + this.b = b; +}; + +tree.Shorthand.prototype = { + toCSS: function (env) { + return this.a.toCSS(env) + "/" + this.b.toCSS(env); + }, + eval: function () { return this } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Ruleset = function (selectors, rules) { + this.selectors = selectors; + this.rules = rules; + this._lookups = {}; +}; +tree.Ruleset.prototype = { + eval: function (env) { + var ruleset = new(tree.Ruleset)(this.selectors, this.rules.slice(0)); + + ruleset.root = this.root; + + // push the current ruleset to the frames stack + env.frames.unshift(ruleset); + + // Evaluate imports + if (ruleset.root) { + for (var i = 0; i < ruleset.rules.length; i++) { + if (ruleset.rules[i] instanceof tree.Import) { + Array.prototype.splice + .apply(ruleset.rules, [i, 1].concat(ruleset.rules[i].eval(env))); + } + } + } + + // Store the frames around mixin definitions, + // so they can be evaluated like closures when the time comes. + for (var i = 0; i < ruleset.rules.length; i++) { + if (ruleset.rules[i] instanceof tree.mixin.Definition) { + ruleset.rules[i].frames = env.frames.slice(0); + } + } + + // Evaluate mixin calls. + for (var i = 0; i < ruleset.rules.length; i++) { + if (ruleset.rules[i] instanceof tree.mixin.Call) { + Array.prototype.splice + .apply(ruleset.rules, [i, 1].concat(ruleset.rules[i].eval(env))); + } + } + + // Evaluate everything else + for (var i = 0, rule; i < ruleset.rules.length; i++) { + rule = ruleset.rules[i]; + + if (! (rule instanceof tree.mixin.Definition)) { + ruleset.rules[i] = rule.eval ? 
rule.eval(env) : rule; + } + } + + // Pop the stack + env.frames.shift(); + + return ruleset; + }, + match: function (args) { + return !args || args.length === 0; + }, + variables: function () { + if (this._variables) { return this._variables } + else { + return this._variables = this.rules.reduce(function (hash, r) { + if (r instanceof tree.Rule && r.variable === true) { + hash[r.name] = r; + } + return hash; + }, {}); + } + }, + variable: function (name) { + return this.variables()[name]; + }, + rulesets: function () { + if (this._rulesets) { return this._rulesets } + else { + return this._rulesets = this.rules.filter(function (r) { + return (r instanceof tree.Ruleset) || (r instanceof tree.mixin.Definition); + }); + } + }, + find: function (selector, self) { + self = self || this; + var rules = [], rule, match, + key = selector.toCSS(); + + if (key in this._lookups) { return this._lookups[key] } + + this.rulesets().forEach(function (rule) { + if (rule !== self) { + for (var j = 0; j < rule.selectors.length; j++) { + if (match = selector.match(rule.selectors[j])) { + if (selector.elements.length > 1) { + Array.prototype.push.apply(rules, rule.find( + new(tree.Selector)(selector.elements.slice(1)), self)); + } else { + rules.push(rule); + } + break; + } + } + } + }); + return this._lookups[key] = rules; + }, + // + // Entry point for code generation + // + // `context` holds an array of arrays. + // + toCSS: function (context, env) { + var css = [], // The CSS output + rules = [], // node.Rule instances + rulesets = [], // node.Ruleset instances + paths = [], // Current selectors + selector, // The fully rendered selector + rule; + + if (! 
this.root) { + if (context.length === 0) { + paths = this.selectors.map(function (s) { return [s] }); + } else { + for (var s = 0; s < this.selectors.length; s++) { + for (var c = 0; c < context.length; c++) { + paths.push(context[c].concat([this.selectors[s]])); + } + } + } + } + + // Compile rules and rulesets + for (var i = 0; i < this.rules.length; i++) { + rule = this.rules[i]; + + if (rule.rules || (rule instanceof tree.Directive)) { + rulesets.push(rule.toCSS(paths, env)); + } else if (rule instanceof tree.Comment) { + if (!rule.silent) { + if (this.root) { + rulesets.push(rule.toCSS(env)); + } else { + rules.push(rule.toCSS(env)); + } + } + } else { + if (rule.toCSS && !rule.variable) { + rules.push(rule.toCSS(env)); + } else if (rule.value && !rule.variable) { + rules.push(rule.value.toString()); + } + } + } + + rulesets = rulesets.join(''); + + // If this is the root node, we don't render + // a selector, or {}. + // Otherwise, only output if this ruleset has rules. + if (this.root) { + css.push(rules.join(env.compress ? '' : '\n')); + } else { + if (rules.length > 0) { + selector = paths.map(function (p) { + return p.map(function (s) { + return s.toCSS(env); + }).join('').trim(); + }).join(env.compress ? ',' : (paths.length > 3 ? ',\n' : ', ')); + css.push(selector, + (env.compress ? '{' : ' {\n ') + + rules.join(env.compress ? '' : '\n ') + + (env.compress ? '}' : '\n}\n')); + } + } + css.push(rulesets); + + return css.join('') + (env.compress ? 
'\n' : ''); + } +}; +})(require('less/tree')); +(function (tree) { + +tree.Selector = function (elements) { + this.elements = elements; + if (this.elements[0].combinator.value === "") { + this.elements[0].combinator.value = ' '; + } +}; +tree.Selector.prototype.match = function (other) { + if (this.elements[0].value === other.elements[0].value) { + return true; + } else { + return false; + } +}; +tree.Selector.prototype.toCSS = function (env) { + if (this._css) { return this._css } + + return this._css = this.elements.map(function (e) { + if (typeof(e) === 'string') { + return ' ' + e.trim(); + } else { + return e.toCSS(env); + } + }).join(''); +}; + +})(require('less/tree')); +(function (tree) { + +tree.URL = function (val, paths) { + if (val.data) { + this.attrs = val; + } else { + // Add the base path if the URL is relative and we are in the browser + if (!/^(?:https?:\/|file:\/|data:\/)?\//.test(val.value) && paths.length > 0 && typeof(window) !== 'undefined') { + val.value = paths[0] + (val.value.charAt(0) === '/' ? val.value.slice(1) : val.value); + } + this.value = val; + this.paths = paths; + } +}; +tree.URL.prototype = { + toCSS: function () { + return "url(" + (this.attrs ? 'data:' + this.attrs.mime + this.attrs.charset + this.attrs.base64 + this.attrs.data + : this.value.toCSS()) + ")"; + }, + eval: function (ctx) { + return this.attrs ? this : new(tree.URL)(this.value.eval(ctx), this.paths); + } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Value = function (value) { + this.value = value; + this.is = 'value'; +}; +tree.Value.prototype = { + eval: function (env) { + if (this.value.length === 1) { + return this.value[0].eval(env); + } else { + return new(tree.Value)(this.value.map(function (v) { + return v.eval(env); + })); + } + }, + toCSS: function (env) { + return this.value.map(function (e) { + return e.toCSS(env); + }).join(env.compress ? 
',' : ', '); + } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Variable = function (name, index) { this.name = name, this.index = index }; +tree.Variable.prototype = { + eval: function (env) { + var variable, v, name = this.name; + + if (name.indexOf('@@') == 0) { + name = '@' + new(tree.Variable)(name.slice(1)).eval(env).value; + } + + if (variable = tree.find(env.frames, function (frame) { + if (v = frame.variable(name)) { + return v.value.eval(env); + } + })) { return variable } + else { + throw { message: "variable " + name + " is undefined", + index: this.index }; + } + } +}; + +})(require('less/tree')); +require('less/tree').find = function (obj, fun) { + for (var i = 0, r; i < obj.length; i++) { + if (r = fun.call(obj, obj[i])) { return r } + } + return null; +}; +require('less/tree').jsify = function (obj) { + if (Array.isArray(obj.value) && (obj.value.length > 1)) { + return '[' + obj.value.map(function (v) { return v.toCSS(false) }).join(', ') + ']'; + } else { + return obj.toCSS(false); + } +}; +// +// browser.js - client-side engine +// + +var isFileProtocol = (location.protocol === 'file:' || + location.protocol === 'chrome:' || + location.protocol === 'chrome-extension:' || + location.protocol === 'resource:'); + +less.env = less.env || (location.hostname == '127.0.0.1' || + location.hostname == '0.0.0.0' || + location.hostname == 'localhost' || + location.port.length > 0 || + isFileProtocol ? 'development' + : 'production'); + +// Load styles asynchronously (default: false) +// +// This is set to `false` by default, so that the body +// doesn't start loading before the stylesheets are parsed. +// Setting this to `true` can result in flickering. +// +less.async = false; + +// Interval between watch polls +less.poll = less.poll || (isFileProtocol ? 
1000 : 1500); + +// +// Watch mode +// +less.watch = function () { return this.watchMode = true }; +less.unwatch = function () { return this.watchMode = false }; + +if (less.env === 'development') { + less.optimization = 0; + + if (/!watch/.test(location.hash)) { + less.watch(); + } + less.watchTimer = setInterval(function () { + if (less.watchMode) { + loadStyleSheets(function (root, sheet, env) { + if (root) { + createCSS(root.toCSS(), sheet, env.lastModified); + } + }); + } + }, less.poll); +} else { + less.optimization = 3; +} + +var cache; + +try { + cache = (typeof(window.localStorage) === 'undefined') ? null : window.localStorage; +} catch (_) { + cache = null; +} + +// +// Get all tags with the 'rel' attribute set to "stylesheet/less" +// +var links = document.getElementsByTagName('link'); +var typePattern = /^text\/(x-)?less$/; + +less.sheets = []; + +for (var i = 0; i < links.length; i++) { + if (links[i].rel === 'stylesheet/less' || (links[i].rel.match(/stylesheet/) && + (links[i].type.match(typePattern)))) { + less.sheets.push(links[i]); + } +} + + +less.refresh = function (reload) { + var startTime, endTime; + startTime = endTime = new(Date); + + loadStyleSheets(function (root, sheet, env) { + if (env.local) { + log("loading " + sheet.href + " from cache."); + } else { + log("parsed " + sheet.href + " successfully."); + createCSS(root.toCSS(), sheet, env.lastModified); + } + log("css for " + sheet.href + " generated in " + (new(Date) - endTime) + 'ms'); + (env.remaining === 0) && log("css generated in " + (new(Date) - startTime) + 'ms'); + endTime = new(Date); + }, reload); + + loadStyles(); +}; +less.refreshStyles = loadStyles; + +less.refresh(less.env === 'development'); + +function loadStyles() { + var styles = document.getElementsByTagName('style'); + for (var i = 0; i < styles.length; i++) { + if (styles[i].type.match(typePattern)) { + new(less.Parser)().parse(styles[i].innerHTML || '', function (e, tree) { + styles[i].type = 'text/css'; + 
styles[i].innerHTML = tree.toCSS(); + }); + } + } +} + +function loadStyleSheets(callback, reload) { + for (var i = 0; i < less.sheets.length; i++) { + loadStyleSheet(less.sheets[i], callback, reload, less.sheets.length - (i + 1)); + } +} + +function loadStyleSheet(sheet, callback, reload, remaining) { + var url = window.location.href.replace(/[#?].*$/, ''); + var href = sheet.href.replace(/\?.*$/, ''); + var css = cache && cache.getItem(href); + var timestamp = cache && cache.getItem(href + ':timestamp'); + var styles = { css: css, timestamp: timestamp }; + + // Stylesheets in IE don't always return the full path + if (! /^(https?|file):/.test(href)) { + if (href.charAt(0) == "/") { + href = window.location.protocol + "//" + window.location.host + href; + } else { + href = url.slice(0, url.lastIndexOf('/') + 1) + href; + } + } + + xhr(sheet.href, sheet.type, function (data, lastModified) { + if (!reload && styles && lastModified && + (new(Date)(lastModified).valueOf() === + new(Date)(styles.timestamp).valueOf())) { + // Use local copy + createCSS(styles.css, sheet); + callback(null, sheet, { local: true, remaining: remaining }); + } else { + // Use remote copy (re-parse) + try { + new(less.Parser)({ + optimization: less.optimization, + paths: [href.replace(/[\w\.-]+$/, '')], + mime: sheet.type + }).parse(data, function (e, root) { + if (e) { return error(e, href) } + try { + callback(root, sheet, { local: false, lastModified: lastModified, remaining: remaining }); + removeNode(document.getElementById('less-error-message:' + extractId(href))); + } catch (e) { + error(e, href); + } + }); + } catch (e) { + error(e, href); + } + } + }, function (status, url) { + throw new(Error)("Couldn't load " + url + " (" + status + ")"); + }); +} + +function extractId(href) { + return href.replace(/^[a-z]+:\/\/?[^\/]+/, '' ) // Remove protocol & domain + .replace(/^\//, '' ) // Remove root / + .replace(/\?.*$/, '' ) // Remove query + .replace(/\.[^\.\/]+$/, '' ) // Remove file 
extension + .replace(/[^\.\w-]+/g, '-') // Replace illegal characters + .replace(/\./g, ':'); // Replace dots with colons(for valid id) +} + +function createCSS(styles, sheet, lastModified) { + var css; + + // Strip the query-string + var href = sheet.href ? sheet.href.replace(/\?.*$/, '') : ''; + + // If there is no title set, use the filename, minus the extension + var id = 'less:' + (sheet.title || extractId(href)); + + // If the stylesheet doesn't exist, create a new node + if ((css = document.getElementById(id)) === null) { + css = document.createElement('style'); + css.type = 'text/css'; + css.media = sheet.media || 'screen'; + css.id = id; + document.getElementsByTagName('head')[0].appendChild(css); + } + + if (css.styleSheet) { // IE + try { + css.styleSheet.cssText = styles; + } catch (e) { + throw new(Error)("Couldn't reassign styleSheet.cssText."); + } + } else { + (function (node) { + if (css.childNodes.length > 0) { + if (css.firstChild.nodeValue !== node.nodeValue) { + css.replaceChild(node, css.firstChild); + } + } else { + css.appendChild(node); + } + })(document.createTextNode(styles)); + } + + // Don't update the local store if the file wasn't modified + if (lastModified && cache) { + log('saving ' + href + ' to cache.'); + cache.setItem(href, styles); + cache.setItem(href + ':timestamp', lastModified); + } +} + +function xhr(url, type, callback, errback) { + var xhr = getXMLHttpRequest(); + var async = isFileProtocol ? 
false : less.async; + + if (typeof(xhr.overrideMimeType) === 'function') { + xhr.overrideMimeType('text/css'); + } + xhr.open('GET', url, async); + xhr.setRequestHeader('Accept', type || 'text/x-less, text/css; q=0.9, */*; q=0.5'); + xhr.send(null); + + if (isFileProtocol) { + if (xhr.status === 0) { + callback(xhr.responseText); + } else { + errback(xhr.status, url); + } + } else if (async) { + xhr.onreadystatechange = function () { + if (xhr.readyState == 4) { + handleResponse(xhr, callback, errback); + } + }; + } else { + handleResponse(xhr, callback, errback); + } + + function handleResponse(xhr, callback, errback) { + if (xhr.status >= 200 && xhr.status < 300) { + callback(xhr.responseText, + xhr.getResponseHeader("Last-Modified")); + } else if (typeof(errback) === 'function') { + errback(xhr.status, url); + } + } +} + +function getXMLHttpRequest() { + if (window.XMLHttpRequest) { + return new(XMLHttpRequest); + } else { + try { + return new(ActiveXObject)("MSXML2.XMLHTTP.3.0"); + } catch (e) { + log("browser doesn't support AJAX."); + return null; + } + } +} + +function removeNode(node) { + return node && node.parentNode.removeChild(node); +} + +function log(str) { + if (less.env == 'development' && typeof(console) !== "undefined") { console.log('less: ' + str) } +} + +function error(e, href) { + var id = 'less-error-message:' + extractId(href); + + var template = ['
      ', + '
    • {0}
    • ', + '
    • {current}
    • ', + '
    • {2}
    • ', + '
    '].join('\n'); + + var elem = document.createElement('div'), timer, content; + + elem.id = id; + elem.className = "less-error-message"; + + content = '

    ' + (e.message || 'There is an error in your .less file') + + '

    ' + '

    ' + href + " "; + + if (e.extract) { + content += 'on line ' + e.line + ', column ' + (e.column + 1) + ':

    ' + + template.replace(/\[(-?\d)\]/g, function (_, i) { + return (parseInt(e.line) + parseInt(i)) || ''; + }).replace(/\{(\d)\}/g, function (_, i) { + return e.extract[parseInt(i)] || ''; + }).replace(/\{current\}/, e.extract[1].slice(0, e.column) + '' + + e.extract[1].slice(e.column) + ''); + } + elem.innerHTML = content; + + // CSS for error messages + createCSS([ + '.less-error-message ul, .less-error-message li {', + 'list-style-type: none;', + 'margin-right: 15px;', + 'padding: 4px 0;', + 'margin: 0;', + '}', + '.less-error-message label {', + 'font-size: 12px;', + 'margin-right: 15px;', + 'padding: 4px 0;', + 'color: #cc7777;', + '}', + '.less-error-message pre {', + 'color: #ee4444;', + 'padding: 4px 0;', + 'margin: 0;', + 'display: inline-block;', + '}', + '.less-error-message pre.ctx {', + 'color: #dd4444;', + '}', + '.less-error-message h3 {', + 'font-size: 20px;', + 'font-weight: bold;', + 'padding: 15px 0 5px 0;', + 'margin: 0;', + '}', + '.less-error-message a {', + 'color: #10a', + '}', + '.less-error-message .error {', + 'color: red;', + 'font-weight: bold;', + 'padding-bottom: 2px;', + 'border-bottom: 1px dashed red;', + '}' + ].join('\n'), { title: 'error-message' }); + + elem.style.cssText = [ + "font-family: Arial, sans-serif", + "border: 1px solid #e00", + "background-color: #eee", + "border-radius: 5px", + "-webkit-border-radius: 5px", + "-moz-border-radius: 5px", + "color: #e00", + "padding: 15px", + "margin-bottom: 15px" + ].join(';'); + + if (less.env == 'development') { + timer = setInterval(function () { + if (document.body) { + if (document.getElementById(id)) { + document.body.replaceChild(elem, document.getElementById(id)); + } else { + document.body.insertBefore(elem, document.body.firstChild); + } + clearInterval(timer); + } + }, 10); + } +} + +})(window); diff --git a/src/dashboard/src/media/vendor/less.js/dist/less-1.1.3.min.js b/src/dashboard/src/media/vendor/less.js/dist/less-1.1.3.min.js new file mode 100644 index 
0000000000..6e4d5cff54 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/dist/less-1.1.3.min.js @@ -0,0 +1,16 @@ +// +// LESS - Leaner CSS v1.1.3 +// http://lesscss.org +// +// Copyright (c) 2009-2011, Alexis Sellier +// Licensed under the Apache 2.0 License. +// +// +// LESS - Leaner CSS v1.1.3 +// http://lesscss.org +// +// Copyright (c) 2009-2011, Alexis Sellier +// Licensed under the Apache 2.0 License. +// +(function(a,b){function v(a,b){var c="less-error-message:"+p(b),e=["
      ",'
    • {0}
    • ',"
    • {current}
    • ",'
    • {2}
    • ',"
    "].join("\n"),f=document.createElement("div"),g,h;f.id=c,f.className="less-error-message",h="

    "+(a.message||"There is an error in your .less file")+"

    "+'

    '+b+" ",a.extract&&(h+="on line "+a.line+", column "+(a.column+1)+":

    "+e.replace(/\[(-?\d)\]/g,function(b,c){return parseInt(a.line)+parseInt(c)||""}).replace(/\{(\d)\}/g,function(b,c){return a.extract[parseInt(c)]||""}).replace(/\{current\}/,a.extract[1].slice(0,a.column)+''+a.extract[1].slice(a.column)+"")),f.innerHTML=h,q([".less-error-message ul, .less-error-message li {","list-style-type: none;","margin-right: 15px;","padding: 4px 0;","margin: 0;","}",".less-error-message label {","font-size: 12px;","margin-right: 15px;","padding: 4px 0;","color: #cc7777;","}",".less-error-message pre {","color: #ee4444;","padding: 4px 0;","margin: 0;","display: inline-block;","}",".less-error-message pre.ctx {","color: #dd4444;","}",".less-error-message h3 {","font-size: 20px;","font-weight: bold;","padding: 15px 0 5px 0;","margin: 0;","}",".less-error-message a {","color: #10a","}",".less-error-message .error {","color: red;","font-weight: bold;","padding-bottom: 2px;","border-bottom: 1px dashed red;","}"].join("\n"),{title:"error-message"}),f.style.cssText=["font-family: Arial, sans-serif","border: 1px solid #e00","background-color: #eee","border-radius: 5px","-webkit-border-radius: 5px","-moz-border-radius: 5px","color: #e00","padding: 15px","margin-bottom: 15px"].join(";"),d.env=="development"&&(g=setInterval(function(){document.body&&(document.getElementById(c)?document.body.replaceChild(f,document.getElementById(c)):document.body.insertBefore(f,document.body.firstChild),clearInterval(g))},10))}function u(a){d.env=="development"&&typeof console!="undefined"&&console.log("less: "+a)}function t(a){return a&&a.parentNode.removeChild(a)}function s(){if(a.XMLHttpRequest)return new XMLHttpRequest;try{return new ActiveXObject("MSXML2.XMLHTTP.3.0")}catch(b){u("browser doesn't support AJAX.");return null}}function r(a,b,c,e){function i(b,c,d){b.status>=200&&b.status<300?c(b.responseText,b.getResponseHeader("Last-Modified")):typeof d=="function"&&d(b.status,a)}var f=s(),h=g?!1:d.async;typeof 
f.overrideMimeType=="function"&&f.overrideMimeType("text/css"),f.open("GET",a,h),f.setRequestHeader("Accept",b||"text/x-less, text/css; q=0.9, */*; q=0.5"),f.send(null),g?f.status===0?c(f.responseText):e(f.status,a):h?f.onreadystatechange=function(){f.readyState==4&&i(f,c,e)}:i(f,c,e)}function q(a,b,c){var d,e=b.href?b.href.replace(/\?.*$/,""):"",f="less:"+(b.title||p(e));(d=document.getElementById(f))===null&&(d=document.createElement("style"),d.type="text/css",d.media=b.media||"screen",d.id=f,document.getElementsByTagName("head")[0].appendChild(d));if(d.styleSheet)try{d.styleSheet.cssText=a}catch(g){throw new Error("Couldn't reassign styleSheet.cssText.")}else(function(a){d.childNodes.length>0?d.firstChild.nodeValue!==a.nodeValue&&d.replaceChild(a,d.firstChild):d.appendChild(a)})(document.createTextNode(a));c&&h&&(u("saving "+e+" to cache."),h.setItem(e,a),h.setItem(e+":timestamp",c))}function p(a){return a.replace(/^[a-z]+:\/\/?[^\/]+/,"").replace(/^\//,"").replace(/\?.*$/,"").replace(/\.[^\.\/]+$/,"").replace(/[^\.\w-]+/g,"-").replace(/\./g,":")}function o(b,c,e,f){var g=a.location.href.replace(/[#?].*$/,""),i=b.href.replace(/\?.*$/,""),j=h&&h.getItem(i),k=h&&h.getItem(i+":timestamp"),l={css:j,timestamp:k};/^(https?|file):/.test(i)||(i.charAt(0)=="/"?i=a.location.protocol+"//"+a.location.host+i:i=g.slice(0,g.lastIndexOf("/")+1)+i),r(b.href,b.type,function(a,g){if(!e&&l&&g&&(new Date(g)).valueOf()===(new Date(l.timestamp)).valueOf())q(l.css,b),c(null,b,{local:!0,remaining:f});else try{(new d.Parser({optimization:d.optimization,paths:[i.replace(/[\w\.-]+$/,"")],mime:b.type})).parse(a,function(a,d){if(a)return v(a,i);try{c(d,b,{local:!1,lastModified:g,remaining:f}),t(document.getElementById("less-error-message:"+p(i)))}catch(a){v(a,i)}})}catch(h){v(h,i)}},function(a,b){throw new Error("Couldn't load "+b+" ("+a+")")})}function n(a,b){for(var c=0;c>>0;for(var d=0;d>>0,c=Array(b),d=arguments[1];for(var e=0;e>>0,c=0;if(b===0&&arguments.length===1)throw new 
TypeError;if(arguments.length>=2)var d=arguments[1];else for(;;){if(c in this){d=this[c++];break}if(++c>=b)throw new TypeError}for(;c=b)return-1;c<0&&(c+=b);for(;ck&&(j[f]=j[f].slice(c-k),k=c)}function q(){j[f]=g,c=h,k=c}function p(){g=j[f],h=c,k=c}var b,c,f,g,h,i,j,k,l,m=this,n=function(){},o=this.imports={paths:a&&a.paths||[],queue:[],files:{},mime:a&&a.mime,push:function(b,c){var e=this;this.queue.push(b),d.Parser.importer(b,this.paths,function(a){e.queue.splice(e.queue.indexOf(b),1),e.files[b]=a,c(a),e.queue.length===0&&n()},a)}};this.env=a=a||{},this.optimization="optimization"in this.env?this.env.optimization:1,this.env.filename=this.env.filename||null;return l={imports:o,parse:function(d,g){var h,l,m,o,p,q,r=[],t,u=null;c=f=k=i=0,j=[],b=d.replace(/\r\n/g,"\n"),j=function(c){var d=0,e=/[^"'`\{\}\/\(\)]+/g,f=/\/\*(?:[^*]|\*+[^\/*])*\*+\/|\/\/.*/g,g=0,h,i=c[0],j,k;for(var l=0,m,n;l0)throw{type:"Syntax",message:"Missing closing `}`",filename:a.filename};return c.map(function(a){return a.join("")})}([[]]),h=new e.Ruleset([],s(this.parsers.primary)),h.root=!0,h.toCSS=function(c){var d,f,g;return function(g,h){function n(a){return a?(b.slice(0,a).match(/\n/g)||"").length:null}var i=[];g=g||{},typeof h=="object"&&!Array.isArray(h)&&(h=Object.keys(h).map(function(a){var b=h[a];b instanceof e.Value||(b instanceof e.Expression||(b=new e.Expression([b])),b=new e.Value([b]));return new e.Rule("@"+a,b,!1,0)}),i=[new e.Ruleset(null,h)]);try{var j=c.call(this,{frames:i}).toCSS([],{compress:g.compress||!1})}catch(k){f=b.split("\n"),d=n(k.index);for(var l=k.index,m=-1;l>=0&&b.charAt(l)!=="\n";l--)m++;throw{type:k.type,message:k.message,filename:a.filename,index:k.index,line:typeof d=="number"?d+1:null,callLine:k.call&&n(k.call)+1,callExtract:f[n(k.call)],stack:k.stack,column:m,extract:[f[d-1],f[d],f[d+1]]}}return g.compress?j.replace(/(\s)+/g,"$1"):j}}(h.eval);if(c=0&&b.charAt(v)!=="\n";v--)w++;u={name:"ParseError",message:"Syntax Error on line 
"+p,index:c,filename:a.filename,line:p,column:w,extract:[q[p-2],q[p-1],q[p]]}}this.imports.queue.length>0?n=function(){g(u,h)}:g(u,h)},parsers:{primary:function(){var a,b=[];while((a=s(this.mixin.definition)||s(this.rule)||s(this.ruleset)||s(this.mixin.call)||s(this.comment)||s(this.directive))||s(/^[\s\n]+/))a&&b.push(a);return b},comment:function(){var a;if(b.charAt(c)==="/"){if(b.charAt(c+1)==="/")return new e.Comment(s(/^\/\/.*/),!0);if(a=s(/^\/\*(?:[^*]|\*+[^\/*])*\*+\/\n?/))return new e.Comment(a)}},entities:{quoted:function(){var a,d=c,f;b.charAt(d)==="~"&&(d++,f=!0);if(b.charAt(d)==='"'||b.charAt(d)==="'"){f&&s("~");if(a=s(/^"((?:[^"\\\r\n]|\\.)*)"|'((?:[^'\\\r\n]|\\.)*)'/))return new e.Quoted(a[0],a[1]||a[2],f)}},keyword:function(){var a;if(a=s(/^[A-Za-z-]+/))return new e.Keyword(a)},call:function(){var a,b,d=c;if(!!(a=/^([\w-]+|%)\(/.exec(j[f]))){a=a[1].toLowerCase();if(a==="url")return null;c+=a.length;if(a==="alpha")return s(this.alpha);s("("),b=s(this.entities.arguments);if(!s(")"))return;if(a)return new e.Call(a,b,d)}},arguments:function(){var a=[],b;while(b=s(this.expression)){a.push(b);if(!s(","))break}return a},literal:function(){return s(this.entities.dimension)||s(this.entities.color)||s(this.entities.quoted)},url:function(){var a;if(b.charAt(c)==="u"&&!!s(/^url\(/)){a=s(this.entities.quoted)||s(this.entities.variable)||s(this.entities.dataURI)||s(/^[-\w%@$\/.&=:;#+?~]+/)||"";if(!s(")"))throw new Error("missing closing ) for url()");return new e.URL(a.value||a.data||a instanceof e.Variable?a:new e.Anonymous(a),o.paths)}},dataURI:function(){var a;if(s(/^data:/)){a={},a.mime=s(/^[^\/]+\/[^,;)]+/)||"",a.charset=s(/^;\s*charset=[^,;)]+/)||"",a.base64=s(/^;\s*base64/)||"",a.data=s(/^,\s*[^)]+/);if(a.data)return a}},variable:function(){var a,d=c;if(b.charAt(c)==="@"&&(a=s(/^@@?[\w-]+/)))return new e.Variable(a,d)},color:function(){var a;if(b.charAt(c)==="#"&&(a=s(/^#([a-fA-F0-9]{6}|[a-fA-F0-9]{3})/)))return new e.Color(a[1])},dimension:function(){var 
a,d=b.charCodeAt(c);if(!(d>57||d<45||d===47))if(a=s(/^(-?\d*\.?\d+)(px|%|em|pc|ex|in|deg|s|ms|pt|cm|mm|rad|grad|turn)?/))return new e.Dimension(a[1],a[2])},javascript:function(){var a,d=c,f;b.charAt(d)==="~"&&(d++,f=!0);if(b.charAt(d)==="`"){f&&s("~");if(a=s(/^`([^`]*)`/))return new e.JavaScript(a[1],c,f)}}},variable:function(){var a;if(b.charAt(c)==="@"&&(a=s(/^(@[\w-]+)\s*:/)))return a[1]},shorthand:function(){var a,b;if(!!t(/^[@\w.%-]+\/[@\w.-]+/)&&(a=s(this.entity))&&s("/")&&(b=s(this.entity)))return new e.Shorthand(a,b)},mixin:{call:function(){var a=[],d,f,g,h=c,i=b.charAt(c);if(i==="."||i==="#"){while(d=s(/^[#.](?:[\w-]|\\(?:[a-fA-F0-9]{1,6} ?|[^a-fA-F0-9]))+/))a.push(new e.Element(f,d)),f=s(">");s("(")&&(g=s(this.entities.arguments))&&s(")");if(a.length>0&&(s(";")||t("}")))return new e.mixin.Call(a,g,h)}},definition:function(){var a,d=[],f,g,h,i;if(!(b.charAt(c)!=="."&&b.charAt(c)!=="#"||t(/^[^{]*(;|})/)))if(f=s(/^([#.](?:[\w-]|\\(?:[a-fA-F0-9]{1,6} ?|[^a-fA-F0-9]))+)\s*\(/)){a=f[1];while(h=s(this.entities.variable)||s(this.entities.literal)||s(this.entities.keyword)){if(h instanceof e.Variable)if(s(":"))if(i=s(this.expression))d.push({name:h.name,value:i});else throw new Error("Expected value");else d.push({name:h.name});else d.push({value:h});if(!s(","))break}if(!s(")"))throw new Error("Expected )");g=s(this.block);if(g)return new e.mixin.Definition(a,d,g)}}},entity:function(){return s(this.entities.literal)||s(this.entities.variable)||s(this.entities.url)||s(this.entities.call)||s(this.entities.keyword)||s(this.entities.javascript)||s(this.comment)},end:function(){return s(";")||t("}")},alpha:function(){var a;if(!!s(/^\(opacity=/i))if(a=s(/^\d+/)||s(this.entities.variable)){if(!s(")"))throw new Error("missing closing ) for alpha()");return new e.Alpha(a)}},element:function(){var a,b,c;c=s(this.combinator),a=s(/^(?:[.#]?|:*)(?:[\w-]|\\(?:[a-fA-F0-9]{1,6} ?|[^a-fA-F0-9]))+/)||s("*")||s(this.attribute)||s(/^\([^)@]+\)/);if(a)return new 
e.Element(c,a)},combinator:function(){var a,d=b.charAt(c);if(d===">"||d==="&"||d==="+"||d==="~"){c++;while(b.charAt(c)===" ")c++;return new e.Combinator(d)}if(d===":"&&b.charAt(c+1)===":"){c+=2;while(b.charAt(c)===" ")c++;return new e.Combinator("::")}return b.charAt(c-1)===" "?new e.Combinator(" "):new e.Combinator(null)},selector:function(){var a,d,f=[],g,h;while(d=s(this.element)){g=b.charAt(c),f.push(d);if(g==="{"||g==="}"||g===";"||g===",")break}if(f.length>0)return new e.Selector(f)},tag:function(){return s(/^[a-zA-Z][a-zA-Z-]*[0-9]?/)||s("*")},attribute:function(){var a="",b,c,d;if(!!s("[")){if(b=s(/^[a-zA-Z-]+/)||s(this.entities.quoted))(d=s(/^[|~*$^]?=/))&&(c=s(this.entities.quoted)||s(/^[\w-]+/))?a=[b,d,c.toCSS?c.toCSS():c].join(""):a=b;if(!s("]"))return;if(a)return"["+a+"]"}},block:function(){var a;if(s("{")&&(a=s(this.primary))&&s("}"))return a},ruleset:function(){var a=[],b,d,g;p();if(g=/^([.#: \w-]+)[\s\n]*\{/.exec(j[f]))c+=g[0].length-1,a=[new e.Selector([new e.Element(null,g[1])])];else while(b=s(this.selector)){a.push(b),s(this.comment);if(!s(","))break;s(this.comment)}if(a.length>0&&(d=s(this.block)))return new e.Ruleset(a,d);i=c,q()},rule:function(){var a,d,g=b.charAt(c),k,l;p();if(g!=="."&&g!=="#"&&g!=="&")if(a=s(this.variable)||s(this.property)){a.charAt(0)!="@"&&(l=/^([^@+\/'"*`(;{}-]*);/.exec(j[f]))?(c+=l[0].length-1,d=new e.Anonymous(l[1])):a==="font"?d=s(this.font):d=s(this.value),k=s(this.important);if(d&&s(this.end))return new e.Rule(a,d,k,h);i=c,q()}},"import":function(){var a;if(s(/^@import\s+/)&&(a=s(this.entities.quoted)||s(this.entities.url))&&s(";"))return new e.Import(a,o)},directive:function(){var a,d,f,g;if(b.charAt(c)==="@"){if(d=s(this["import"]))return d;if(a=s(/^@media|@page|@-[-a-z]+/)){g=(s(/^[^{]+/)||"").trim();if(f=s(this.block))return new e.Directive(a+" "+g,f)}else if(a=s(/^@[-a-z]+/))if(a==="@font-face"){if(f=s(this.block))return new e.Directive(a,f)}else if((d=s(this.entity))&&s(";"))return new 
e.Directive(a,d)}},font:function(){var a=[],b=[],c,d,f,g;while(g=s(this.shorthand)||s(this.entity))b.push(g);a.push(new e.Expression(b));if(s(","))while(g=s(this.expression)){a.push(g);if(!s(","))break}return new e.Value(a)},value:function(){var a,b=[],c;while(a=s(this.expression)){b.push(a);if(!s(","))break}if(b.length>0)return new e.Value(b)},important:function(){if(b.charAt(c)==="!")return s(/^! *important/)},sub:function(){var a;if(s("(")&&(a=s(this.expression))&&s(")"))return a},multiplication:function(){var a,b,c,d;if(a=s(this.operand)){while((c=s("/")||s("*"))&&(b=s(this.operand)))d=new e.Operation(c,[d||a,b]);return d||a}},addition:function(){var a,d,f,g;if(a=s(this.multiplication)){while((f=s(/^[-+]\s+/)||b.charAt(c-1)!=" "&&(s("+")||s("-")))&&(d=s(this.multiplication)))g=new e.Operation(f,[g||a,d]);return g||a}},operand:function(){var a,d=b.charAt(c+1);b.charAt(c)==="-"&&(d==="@"||d==="(")&&(a=s("-"));var f=s(this.sub)||s(this.entities.dimension)||s(this.entities.color)||s(this.entities.variable)||s(this.entities.call);return a?new e.Operation("*",[new e.Dimension(-1),f]):f},expression:function(){var a,b,c=[],d;while(a=s(this.addition)||s(this.entity))c.push(a);if(c.length>0)return new e.Expression(c)},property:function(){var a;if(a=s(/^(\*?-?[-a-z_0-9]+)\s*:/))return a[1]}}}},typeof a!="undefined"&&(d.Parser.importer=function(a,b,c,d){a.charAt(0)!=="/"&&b.length>0&&(a=b[0]+a),o({href:a,title:a,type:d.mime},c,!0)}),function(a){function d(a){return Math.min(1,Math.max(0,a))}function c(b){if(b instanceof a.Dimension)return parseFloat(b.unit=="%"?b.value/100:b.value);if(typeof b=="number")return b;throw{error:"RuntimeError",message:"color functions take numbers as parameters"}}function b(b){return a.functions.hsla(b.h,b.s,b.l,b.a)}a.functions={rgb:function(a,b,c){return this.rgba(a,b,c,1)},rgba:function(b,d,e,f){var g=[b,d,e].map(function(a){return c(a)}),f=c(f);return new a.Color(g,f)},hsl:function(a,b,c){return 
this.hsla(a,b,c,1)},hsla:function(a,b,d,e){function h(a){a=a<0?a+1:a>1?a-1:a;return a*6<1?g+(f-g)*a*6:a*2<1?f:a*3<2?g+(f-g)*(2/3-a)*6:g}a=c(a)%360/360,b=c(b),d=c(d),e=c(e);var f=d<=.5?d*(b+1):d+b-d*b,g=d*2-f;return this.rgba(h(a+1/3)*255,h(a)*255,h(a-1/3)*255,e)},hue:function(b){return new a.Dimension(Math.round(b.toHSL().h))},saturation:function(b){return new a.Dimension(Math.round(b.toHSL().s*100),"%")},lightness:function(b){return new a.Dimension(Math.round(b.toHSL().l*100),"%")},alpha:function(b){return new a.Dimension(b.toHSL().a)},saturate:function(a,c){var e=a.toHSL();e.s+=c.value/100,e.s=d(e.s);return b(e)},desaturate:function(a,c){var e=a.toHSL();e.s-=c.value/100,e.s=d(e.s);return b(e)},lighten:function(a,c){var e=a.toHSL();e.l+=c.value/100,e.l=d(e.l);return b(e)},darken:function(a,c){var e=a.toHSL();e.l-=c.value/100,e.l=d(e.l);return b(e)},fadein:function(a,c){var e=a.toHSL();e.a+=c.value/100,e.a=d(e.a);return b(e)},fadeout:function(a,c){var e=a.toHSL();e.a-=c.value/100,e.a=d(e.a);return b(e)},spin:function(a,c){var d=a.toHSL(),e=(d.h+c.value)%360;d.h=e<0?360+e:e;return b(d)},mix:function(b,c,d){var e=d.value/100,f=e*2-1,g=b.toHSL().a-c.toHSL().a,h=((f*g==-1?f:(f+g)/(1+f*g))+1)/2,i=1-h,j=[b.rgb[0]*h+c.rgb[0]*i,b.rgb[1]*h+c.rgb[1]*i,b.rgb[2]*h+c.rgb[2]*i],k=b.alpha*e+c.alpha*(1-e);return new a.Color(j,k)},greyscale:function(b){return this.desaturate(b,new a.Dimension(100))},e:function(b){return new a.Anonymous(b instanceof a.JavaScript?b.evaluated:b)},escape:function(b){return new a.Anonymous(encodeURI(b.value).replace(/=/g,"%3D").replace(/:/g,"%3A").replace(/#/g,"%23").replace(/;/g,"%3B").replace(/\(/g,"%28").replace(/\)/g,"%29"))},"%":function(b){var c=Array.prototype.slice.call(arguments,1),d=b.value;for(var e=0;e255?255:a<0?0:a).toString(16);return a.length===1?"0"+a:a}).join("")},operate:function(b,c){var d=[];c instanceof a.Color||(c=c.toColor());for(var e=0;e<3;e++)d[e]=a.operate(b,this.rgb[e],c.rgb[e]);return new 
a.Color(d,this.alpha+c.alpha)},toHSL:function(){var a=this.rgb[0]/255,b=this.rgb[1]/255,c=this.rgb[2]/255,d=this.alpha,e=Math.max(a,b,c),f=Math.min(a,b,c),g,h,i=(e+f)/2,j=e-f;if(e===f)g=h=0;else{h=i>.5?j/(2-e-f):j/(e+f);switch(e){case a:g=(b-c)/j+(b":a.compress?">":" > "}[this.value]}}(c("less/tree")),function(a){a.Expression=function(a){this.value=a},a.Expression.prototype={eval:function(b){return this.value.length>1?new a.Expression(this.value.map(function(a){return a.eval(b)})):this.value.length===1?this.value[0].eval(b):this},toCSS:function(a){return this.value.map(function(b){return b.toCSS(a)}).join(" ")}}}(c("less/tree")),function(a){a.Import=function(b,c){var d=this;this._path=b,b instanceof a.Quoted?this.path=/\.(le?|c)ss$/.test(b.value)?b.value:b.value+".less":this.path=b.value.value||b.value,this.css=/css$/.test(this.path),this.css||c.push(this.path,function(a){if(!a)throw new Error("Error parsing "+d.path);d.root=a})},a.Import.prototype={toCSS:function(){return this.css?"@import "+this._path.toCSS()+";\n":""},eval:function(b){var c;if(this.css)return this;c=new a.Ruleset(null,this.root.rules.slice(0));for(var d=0;d0){c=this.arguments&&this.arguments.map(function(b){return b.eval(a)});for(var g=0;g0&&c>this.params.length)return!1;d=Math.min(c,this.arity);for(var e=0;e1?Array.prototype.push.apply(d,e.find(new a.Selector(b.elements.slice(1)),c)):d.push(e);break}});return this._lookups[g]=d},toCSS:function(b,c){var d=[],e=[],f=[],g=[],h,i;if(!this.root)if(b.length===0)g=this.selectors.map(function(a){return[a]});else for(var j=0;j0&&(h=g.map(function(a){return a.map(function(a){return a.toCSS(c)}).join("").trim()}).join(c.compress?",":g.length>3?",\n":", "),d.push(h,(c.compress?"{":" {\n ")+e.join(c.compress?"":"\n ")+(c.compress?"}":"\n}\n"))),d.push(f);return d.join("")+(c.compress?"\n":"")}}}(c("less/tree")),function(a){a.Selector=function(a){this.elements=a,this.elements[0].combinator.value===""&&(this.elements[0].combinator.value=" 
")},a.Selector.prototype.match=function(a){return this.elements[0].value===a.elements[0].value?!0:!1},a.Selector.prototype.toCSS=function(a){if(this._css)return this._css;return this._css=this.elements.map(function(b){return typeof b=="string"?" "+b.trim():b.toCSS(a)}).join("")}}(c("less/tree")),function(b){b.URL=function(b,c){b.data?this.attrs=b:(!/^(?:https?:\/|file:\/|data:\/)?\//.test(b.value)&&c.length>0&&typeof a!="undefined"&&(b.value=c[0]+(b.value.charAt(0)==="/"?b.value.slice(1):b.value)),this.value=b,this.paths=c)},b.URL.prototype={toCSS:function(){return"url("+(this.attrs?"data:"+this.attrs.mime+this.attrs.charset+this.attrs.base64+this.attrs.data:this.value.toCSS())+")"},eval:function(a){return this.attrs?this:new b.URL(this.value.eval(a),this.paths)}}}(c("less/tree")),function(a){a.Value=function(a){this.value=a,this.is="value"},a.Value.prototype={eval:function(b){return this.value.length===1?this.value[0].eval(b):new a.Value(this.value.map(function(a){return a.eval(b)}))},toCSS:function(a){return this.value.map(function(b){return b.toCSS(a)}).join(a.compress?",":", ")}}}(c("less/tree")),function(a){a.Variable=function(a,b){this.name=a,this +.index=b},a.Variable.prototype={eval:function(b){var c,d,e=this.name;e.indexOf("@@")==0&&(e="@"+(new a.Variable(e.slice(1))).eval(b).value);if(c=a.find(b.frames,function(a){if(d=a.variable(e))return d.value.eval(b)}))return c;throw{message:"variable "+e+" is undefined",index:this.index}}}}(c("less/tree")),c("less/tree").find=function(a,b){for(var c=0,d;c1?"["+a.value.map(function(a){return a.toCSS(!1)}).join(", ")+"]":a.toCSS(!1)};var g=location.protocol==="file:"||location.protocol==="chrome:"||location.protocol==="chrome-extension:"||location.protocol==="resource:";d.env=d.env||(location.hostname=="127.0.0.1"||location.hostname=="0.0.0.0"||location.hostname=="localhost"||location.port.length>0||g?"development":"production"),d.async=!1,d.poll=d.poll||(g?1e3:1500),d.watch=function(){return 
this.watchMode=!0},d.unwatch=function(){return this.watchMode=!1},d.env==="development"?(d.optimization=0,/!watch/.test(location.hash)&&d.watch(),d.watchTimer=setInterval(function(){d.watchMode&&n(function(a,b,c){a&&q(a.toCSS(),b,c.lastModified)})},d.poll)):d.optimization=3;var h;try{h=typeof a.localStorage=="undefined"?null:a.localStorage}catch(i){h=null}var j=document.getElementsByTagName("link"),k=/^text\/(x-)?less$/;d.sheets=[];for(var l=0;l>> 0; + for (var i = 0; i < len; i++) { + if (i in this) { + block.call(thisObject, this[i], i, this); + } + } + }; +} +if (!Array.prototype.map) { + Array.prototype.map = function(fun /*, thisp*/) { + var len = this.length >>> 0; + var res = new Array(len); + var thisp = arguments[1]; + + for (var i = 0; i < len; i++) { + if (i in this) { + res[i] = fun.call(thisp, this[i], i, this); + } + } + return res; + }; +} +if (!Array.prototype.filter) { + Array.prototype.filter = function (block /*, thisp */) { + var values = []; + var thisp = arguments[1]; + for (var i = 0; i < this.length; i++) { + if (block.call(thisp, this[i])) { + values.push(this[i]); + } + } + return values; + }; +} +if (!Array.prototype.reduce) { + Array.prototype.reduce = function(fun /*, initial*/) { + var len = this.length >>> 0; + var i = 0; + + // no value to return if no initial value and an empty array + if (len === 0 && arguments.length === 1) throw new TypeError(); + + if (arguments.length >= 2) { + var rv = arguments[1]; + } else { + do { + if (i in this) { + rv = this[i++]; + break; + } + // if array contains no values, no initial value to return + if (++i >= len) throw new TypeError(); + } while (true); + } + for (; i < len; i++) { + if (i in this) { + rv = fun.call(null, rv, this[i], i, this); + } + } + return rv; + }; +} +if (!Array.prototype.indexOf) { + Array.prototype.indexOf = function (value /*, fromIndex */ ) { + var length = this.length; + var i = arguments[1] || 0; + + if (!length) return -1; + if (i >= length) return -1; + if (i < 0) i 
+= length; + + for (; i < length; i++) { + if (!Object.prototype.hasOwnProperty.call(this, i)) { continue } + if (value === this[i]) return i; + } + return -1; + }; +} + +// +// Object +// +if (!Object.keys) { + Object.keys = function (object) { + var keys = []; + for (var name in object) { + if (Object.prototype.hasOwnProperty.call(object, name)) { + keys.push(name); + } + } + return keys; + }; +} + +// +// String +// +if (!String.prototype.trim) { + String.prototype.trim = function () { + return String(this).replace(/^\s\s*/, '').replace(/\s\s*$/, ''); + }; +} +var less, tree; + +if (typeof(window) === 'undefined') { + less = exports, + tree = require('less/tree'); +} else { + if (typeof(window.less) === 'undefined') { window.less = {} } + less = window.less, + tree = window.less.tree = {}; +} +// +// less.js - parser +// +// A relatively straight-forward predictive parser. +// There is no tokenization/lexing stage, the input is parsed +// in one sweep. +// +// To make the parser fast enough to run in the browser, several +// optimization had to be made: +// +// - Matching and slicing on a huge input is often cause of slowdowns. +// The solution is to chunkify the input into smaller strings. +// The chunks are stored in the `chunks` var, +// `j` holds the current chunk index, and `current` holds +// the index of the current chunk in relation to `input`. +// This gives us an almost 4x speed-up. +// +// - In many cases, we don't need to match individual tokens; +// for example, if a value doesn't hold any variables, operations +// or dynamic references, the parser can effectively 'skip' it, +// treating it as a literal. +// An example would be '1px solid #000' - which evaluates to itself, +// we don't need to know what the individual components are. +// The drawback, of course is that you don't get the benefits of +// syntax-checking on the CSS. This gives us a 50% speed-up in the parser, +// and a smaller speed-up in the code-gen. 
+// +// +// Token matching is done with the `$` function, which either takes +// a terminal string or regexp, or a non-terminal function to call. +// It also takes care of moving all the indices forwards. +// +// +less.Parser = function Parser(env) { + var input, // LeSS input string + i, // current index in `input` + j, // current chunk + temp, // temporarily holds a chunk's state, for backtracking + memo, // temporarily holds `i`, when backtracking + furthest, // furthest index the parser has gone to + chunks, // chunkified input + current, // index of current chunk, in `input` + parser; + + var that = this; + + // This function is called after all files + // have been imported through `@import`. + var finish = function () {}; + + var imports = this.imports = { + paths: env && env.paths || [], // Search paths, when importing + queue: [], // Files which haven't been imported yet + files: {}, // Holds the imported parse trees + mime: env && env.mime, // MIME type of .less files + push: function (path, callback) { + var that = this; + this.queue.push(path); + + // + // Import a file asynchronously + // + less.Parser.importer(path, this.paths, function (root) { + that.queue.splice(that.queue.indexOf(path), 1); // Remove the path from the queue + that.files[path] = root; // Store the root + + callback(root); + + if (that.queue.length === 0) { finish() } // Call `finish` if we're done importing + }, env); + } + }; + + function save() { temp = chunks[j], memo = i, current = i } + function restore() { chunks[j] = temp, i = memo, current = i } + + function sync() { + if (i > current) { + chunks[j] = chunks[j].slice(i - current); + current = i; + } + } + // + // Parse from a token, regexp or string, and move forward if match + // + function $(tok) { + var match, args, length, c, index, endIndex, k, mem; + + // + // Non-terminal + // + if (tok instanceof Function) { + return tok.call(parser.parsers); + // + // Terminal + // + // Either match a single character in the input, 
+ // or match a regexp in the current chunk (chunk[j]). + // + } else if (typeof(tok) === 'string') { + match = input.charAt(i) === tok ? tok : null; + length = 1; + sync (); + } else { + sync (); + + if (match = tok.exec(chunks[j])) { + length = match[0].length; + } else { + return null; + } + } + + // The match is confirmed, add the match length to `i`, + // and consume any extra white-space characters (' ' || '\n') + // which come after that. The reason for this is that LeSS's + // grammar is mostly white-space insensitive. + // + if (match) { + mem = i += length; + endIndex = i + chunks[j].length - length; + + while (i < endIndex) { + c = input.charCodeAt(i); + if (! (c === 32 || c === 10 || c === 9)) { break } + i++; + } + chunks[j] = chunks[j].slice(length + (i - mem)); + current = i; + + if (chunks[j].length === 0 && j < chunks.length - 1) { j++ } + + if(typeof(match) === 'string') { + return match; + } else { + return match.length === 1 ? match[0] : match; + } + } + } + + // Same as $(), but don't change the state of the parser, + // just return the match. + function peek(tok) { + if (typeof(tok) === 'string') { + return input.charAt(i) === tok; + } else { + if (tok.test(chunks[j])) { + return true; + } else { + return false; + } + } + } + + this.env = env = env || {}; + + // The optimization level dictates the thoroughness of the parser, + // the lower the number, the less nodes it will create in the tree. + // This could matter for debugging, or if you want to access + // the individual nodes in the tree. + this.optimization = ('optimization' in this.env) ? this.env.optimization : 1; + + this.env.filename = this.env.filename || null; + + // + // The Parser + // + return parser = { + + imports: imports, + // + // Parse an input string into an abstract syntax tree, + // call `callback` when done. 
+ // + parse: function (str, callback) { + var root, start, end, zone, line, lines, buff = [], c, error = null; + + i = j = current = furthest = 0; + chunks = []; + input = str.replace(/\r\n/g, '\n'); + + // Split the input into chunks. + chunks = (function (chunks) { + var j = 0, + skip = /[^"'`\{\}\/\(\)]+/g, + comment = /\/\*(?:[^*]|\*+[^\/*])*\*+\/|\/\/.*/g, + level = 0, + match, + chunk = chunks[0], + inParam, + inString; + + for (var i = 0, c, cc; i < input.length; i++) { + skip.lastIndex = i; + if (match = skip.exec(input)) { + if (match.index === i) { + i += match[0].length; + chunk.push(match[0]); + } + } + c = input.charAt(i); + comment.lastIndex = i; + + if (!inString && !inParam && c === '/') { + cc = input.charAt(i + 1); + if (cc === '/' || cc === '*') { + if (match = comment.exec(input)) { + if (match.index === i) { + i += match[0].length; + chunk.push(match[0]); + c = input.charAt(i); + } + } + } + } + + if (c === '{' && !inString && !inParam) { level ++; + chunk.push(c); + } else if (c === '}' && !inString && !inParam) { level --; + chunk.push(c); + chunks[++j] = chunk = []; + } else if (c === '(' && !inString && !inParam) { + chunk.push(c); + inParam = true; + } else if (c === ')' && !inString && inParam) { + chunk.push(c); + inParam = false; + } else { + if (c === '"' || c === "'" || c === '`') { + if (! inString) { + inString = c; + } else { + inString = inString === c ? false : inString; + } + } + chunk.push(c); + } + } + if (level > 0) { + throw { + type: 'Syntax', + message: "Missing closing `}`", + filename: env.filename + }; + } + + return chunks.map(function (c) { return c.join('') });; + })([[]]); + + // Start with the primary rule. + // The whole syntax tree is held under a Ruleset node, + // with the `root` property set to true, so no `{}` are + // output. The callback is called when the input is parsed. 
+ root = new(tree.Ruleset)([], $(this.parsers.primary)); + root.root = true; + + root.toCSS = (function (evaluate) { + var line, lines, column; + + return function (options, variables) { + var frames = []; + + options = options || {}; + // + // Allows setting variables with a hash, so: + // + // `{ color: new(tree.Color)('#f01') }` will become: + // + // new(tree.Rule)('@color', + // new(tree.Value)([ + // new(tree.Expression)([ + // new(tree.Color)('#f01') + // ]) + // ]) + // ) + // + if (typeof(variables) === 'object' && !Array.isArray(variables)) { + variables = Object.keys(variables).map(function (k) { + var value = variables[k]; + + if (! (value instanceof tree.Value)) { + if (! (value instanceof tree.Expression)) { + value = new(tree.Expression)([value]); + } + value = new(tree.Value)([value]); + } + return new(tree.Rule)('@' + k, value, false, 0); + }); + frames = [new(tree.Ruleset)(null, variables)]; + } + + try { + var css = evaluate.call(this, { frames: frames }) + .toCSS([], { compress: options.compress || false }); + } catch (e) { + lines = input.split('\n'); + line = getLine(e.index); + + for (var n = e.index, column = -1; + n >= 0 && input.charAt(n) !== '\n'; + n--) { column++ } + + throw { + type: e.type, + message: e.message, + filename: env.filename, + index: e.index, + line: typeof(line) === 'number' ? line + 1 : null, + callLine: e.call && (getLine(e.call) + 1), + callExtract: lines[getLine(e.call)], + stack: e.stack, + column: column, + extract: [ + lines[line - 1], + lines[line], + lines[line + 1] + ] + }; + } + if (options.compress) { + return css.replace(/(\s)+/g, "$1"); + } else { + return css; + } + + function getLine(index) { + return index ? (input.slice(0, index).match(/\n/g) || "").length : null; + } + }; + })(root.eval); + + // If `i` is smaller than the `input.length - 1`, + // it means the parser wasn't able to parse the whole + // string, so we've got a parsing error. 
+ // + // We try to extract a \n delimited string, + // showing the line where the parse error occured. + // We split it up into two parts (the part which parsed, + // and the part which didn't), so we can color them differently. + if (i < input.length - 1) { + i = furthest; + lines = input.split('\n'); + line = (input.slice(0, i).match(/\n/g) || "").length + 1; + + for (var n = i, column = -1; n >= 0 && input.charAt(n) !== '\n'; n--) { column++ } + + error = { + name: "ParseError", + message: "Syntax Error on line " + line, + index: i, + filename: env.filename, + line: line, + column: column, + extract: [ + lines[line - 2], + lines[line - 1], + lines[line] + ] + }; + } + + if (this.imports.queue.length > 0) { + finish = function () { callback(error, root) }; + } else { + callback(error, root); + } + }, + + // + // Here in, the parsing rules/functions + // + // The basic structure of the syntax tree generated is as follows: + // + // Ruleset -> Rule -> Value -> Expression -> Entity + // + // Here's some LESS code: + // + // .class { + // color: #fff; + // border: 1px solid #000; + // width: @w + 4px; + // > .child {...} + // } + // + // And here's what the parse tree might look like: + // + // Ruleset (Selector '.class', [ + // Rule ("color", Value ([Expression [Color #fff]])) + // Rule ("border", Value ([Expression [Dimension 1px][Keyword "solid"][Color #000]])) + // Rule ("width", Value ([Expression [Operation "+" [Variable "@w"][Dimension 4px]]])) + // Ruleset (Selector [Element '>', '.child'], [...]) + // ]) + // + // In general, most rules will try to parse a token with the `$()` function, and if the return + // value is truly, will return a new node, of the relevant type. Sometimes, we need to check + // first, before parsing, that's when we use `peek()`. + // + parsers: { + // + // The `primary` rule is the *entry* and *exit* point of the parser. + // The rules here can appear at any level of the parse tree. 
+ // + // The recursive nature of the grammar is an interplay between the `block` + // rule, which represents `{ ... }`, the `ruleset` rule, and this `primary` rule, + // as represented by this simplified grammar: + // + // primary → (ruleset | rule)+ + // ruleset → selector+ block + // block → '{' primary '}' + // + // Only at one point is the primary rule not called from the + // block rule: at the root level. + // + primary: function () { + var node, root = []; + + while ((node = $(this.mixin.definition) || $(this.rule) || $(this.ruleset) || + $(this.mixin.call) || $(this.comment) || $(this.directive)) + || $(/^[\s\n]+/)) { + node && root.push(node); + } + return root; + }, + + // We create a Comment node for CSS comments `/* */`, + // but keep the LeSS comments `//` silent, by just skipping + // over them. + comment: function () { + var comment; + + if (input.charAt(i) !== '/') return; + + if (input.charAt(i + 1) === '/') { + return new(tree.Comment)($(/^\/\/.*/), true); + } else if (comment = $(/^\/\*(?:[^*]|\*+[^\/*])*\*+\/\n?/)) { + return new(tree.Comment)(comment); + } + }, + + // + // Entities are tokens which can be found inside an Expression + // + entities: { + // + // A string, which supports escaping " and ' + // + // "milky way" 'he\'s the one!' + // + quoted: function () { + var str, j = i, e; + + if (input.charAt(j) === '~') { j++, e = true } // Escaped strings + if (input.charAt(j) !== '"' && input.charAt(j) !== "'") return; + + e && $('~'); + + if (str = $(/^"((?:[^"\\\r\n]|\\.)*)"|'((?:[^'\\\r\n]|\\.)*)'/)) { + return new(tree.Quoted)(str[0], str[1] || str[2], e); + } + }, + + // + // A catch-all word, such as: + // + // black border-collapse + // + keyword: function () { + var k; + if (k = $(/^[A-Za-z-]+/)) { return new(tree.Keyword)(k) } + }, + + // + // A function call + // + // rgb(255, 0, 255) + // + // We also try to catch IE's `alpha()`, but let the `alpha` parser + // deal with the details. 
+ // + // The arguments are parsed with the `entities.arguments` parser. + // + call: function () { + var name, args, index = i; + + if (! (name = /^([\w-]+|%)\(/.exec(chunks[j]))) return; + + name = name[1].toLowerCase(); + + if (name === 'url') { return null } + else { i += name.length } + + if (name === 'alpha') { return $(this.alpha) } + + $('('); // Parse the '(' and consume whitespace. + + args = $(this.entities.arguments); + + if (! $(')')) return; + + if (name) { return new(tree.Call)(name, args, index) } + }, + arguments: function () { + var args = [], arg; + + while (arg = $(this.expression)) { + args.push(arg); + if (! $(',')) { break } + } + return args; + }, + literal: function () { + return $(this.entities.dimension) || + $(this.entities.color) || + $(this.entities.quoted); + }, + + // + // Parse url() tokens + // + // We use a specific rule for urls, because they don't really behave like + // standard function calls. The difference is that the argument doesn't have + // to be enclosed within a string, so it can't be parsed as an Expression. + // + url: function () { + var value; + + if (input.charAt(i) !== 'u' || !$(/^url\(/)) return; + value = $(this.entities.quoted) || $(this.entities.variable) || + $(this.entities.dataURI) || $(/^[-\w%@$\/.&=:;#+?~]+/) || ""; + if (! $(')')) throw new(Error)("missing closing ) for url()"); + + return new(tree.URL)((value.value || value.data || value instanceof tree.Variable) + ? value : new(tree.Anonymous)(value), imports.paths); + }, + + dataURI: function () { + var obj; + + if ($(/^data:/)) { + obj = {}; + obj.mime = $(/^[^\/]+\/[^,;)]+/) || ''; + obj.charset = $(/^;\s*charset=[^,;)]+/) || ''; + obj.base64 = $(/^;\s*base64/) || ''; + obj.data = $(/^,\s*[^)]+/); + + if (obj.data) { return obj } + } + }, + + // + // A Variable entity, such as `@fink`, in + // + // width: @fink + 2px + // + // We use a different parser for variable definitions, + // see `parsers.variable`. 
+ // + variable: function () { + var name, index = i; + + if (input.charAt(i) === '@' && (name = $(/^@@?[\w-]+/))) { + return new(tree.Variable)(name, index); + } + }, + + // + // A Hexadecimal color + // + // #4F3C2F + // + // `rgb` and `hsl` colors are parsed through the `entities.call` parser. + // + color: function () { + var rgb; + + if (input.charAt(i) === '#' && (rgb = $(/^#([a-fA-F0-9]{6}|[a-fA-F0-9]{3})/))) { + return new(tree.Color)(rgb[1]); + } + }, + + // + // A Dimension, that is, a number and a unit + // + // 0.5em 95% + // + dimension: function () { + var value, c = input.charCodeAt(i); + if ((c > 57 || c < 45) || c === 47) return; + + if (value = $(/^(-?\d*\.?\d+)(px|%|em|pc|ex|in|deg|s|ms|pt|cm|mm|rad|grad|turn)?/)) { + return new(tree.Dimension)(value[1], value[2]); + } + }, + + // + // JavaScript code to be evaluated + // + // `window.location.href` + // + javascript: function () { + var str, j = i, e; + + if (input.charAt(j) === '~') { j++, e = true } // Escaped strings + if (input.charAt(j) !== '`') { return } + + e && $('~'); + + if (str = $(/^`([^`]*)`/)) { + return new(tree.JavaScript)(str[1], i, e); + } + } + }, + + // + // The variable part of a variable definition. Used in the `rule` parser + // + // @fink: + // + variable: function () { + var name; + + if (input.charAt(i) === '@' && (name = $(/^(@[\w-]+)\s*:/))) { return name[1] } + }, + + // + // A font size/line-height shorthand + // + // small/12px + // + // We need to peek first, or we'll match on keywords and dimensions + // + shorthand: function () { + var a, b; + + if (! 
peek(/^[@\w.%-]+\/[@\w.-]+/)) return; + + if ((a = $(this.entity)) && $('/') && (b = $(this.entity))) { + return new(tree.Shorthand)(a, b); + } + }, + + // + // Mixins + // + mixin: { + // + // A Mixin call, with an optional argument list + // + // #mixins > .square(#fff); + // .rounded(4px, black); + // .button; + // + // The `while` loop is there because mixins can be + // namespaced, but we only support the child and descendant + // selector for now. + // + call: function () { + var elements = [], e, c, args, index = i, s = input.charAt(i); + + if (s !== '.' && s !== '#') { return } + + while (e = $(/^[#.](?:[\w-]|\\(?:[a-fA-F0-9]{1,6} ?|[^a-fA-F0-9]))+/)) { + elements.push(new(tree.Element)(c, e)); + c = $('>'); + } + $('(') && (args = $(this.entities.arguments)) && $(')'); + + if (elements.length > 0 && ($(';') || peek('}'))) { + return new(tree.mixin.Call)(elements, args, index); + } + }, + + // + // A Mixin definition, with a list of parameters + // + // .rounded (@radius: 2px, @color) { + // ... + // } + // + // Until we have a finer grained state-machine, we have to + // do a look-ahead, to make sure we don't have a mixin call. + // See the `rule` function for more information. + // + // We start by matching `.rounded (`, and then proceed on to + // the argument list, which has optional default values. + // We store the parameters in `params`, with a `value` key, + // if there is a value, such as in the case of `@radius`. + // + // Once we've got our params list, and a closing `)`, we parse + // the `{...}` block. + // + definition: function () { + var name, params = [], match, ruleset, param, value; + + if ((input.charAt(i) !== '.' 
&& input.charAt(i) !== '#') || + peek(/^[^{]*(;|})/)) return; + + if (match = $(/^([#.](?:[\w-]|\\(?:[a-fA-F0-9]{1,6} ?|[^a-fA-F0-9]))+)\s*\(/)) { + name = match[1]; + + while (param = $(this.entities.variable) || $(this.entities.literal) + || $(this.entities.keyword)) { + // Variable + if (param instanceof tree.Variable) { + if ($(':')) { + if (value = $(this.expression)) { + params.push({ name: param.name, value: value }); + } else { + throw new(Error)("Expected value"); + } + } else { + params.push({ name: param.name }); + } + } else { + params.push({ value: param }); + } + if (! $(',')) { break } + } + if (! $(')')) throw new(Error)("Expected )"); + + ruleset = $(this.block); + + if (ruleset) { + return new(tree.mixin.Definition)(name, params, ruleset); + } + } + } + }, + + // + // Entities are the smallest recognized token, + // and can be found inside a rule's value. + // + entity: function () { + return $(this.entities.literal) || $(this.entities.variable) || $(this.entities.url) || + $(this.entities.call) || $(this.entities.keyword) || $(this.entities.javascript) || + $(this.comment); + }, + + // + // A Rule terminator. Note that we use `peek()` to check for '}', + // because the `block` rule will be expecting it, but we still need to make sure + // it's there, if ';' was ommitted. + // + end: function () { + return $(';') || peek('}'); + }, + + // + // IE's alpha function + // + // alpha(opacity=88) + // + alpha: function () { + var value; + + if (! $(/^\(opacity=/i)) return; + if (value = $(/^\d+/) || $(this.entities.variable)) { + if (! $(')')) throw new(Error)("missing closing ) for alpha()"); + return new(tree.Alpha)(value); + } + }, + + // + // A Selector Element + // + // div + // + h1 + // #socks + // input[type="text"] + // + // Elements are the building blocks for Selectors, + // they are made out of a `Combinator` (see combinator rule), + // and an element name, such as a tag a class, or `*`. 
+ // + element: function () { + var e, t, c; + + c = $(this.combinator); + e = $(/^(?:[.#]?|:*)(?:[\w-]|\\(?:[a-fA-F0-9]{1,6} ?|[^a-fA-F0-9]))+/) || $('*') || $(this.attribute) || $(/^\([^)@]+\)/) || $(/^(?:\d*\.)?\d+%/); + + if (e) { return new(tree.Element)(c, e) } + + if (c.value && c.value[0] === '&') { + return new(tree.Element)(c, null); + } + }, + + // + // Combinators combine elements together, in a Selector. + // + // Because our parser isn't white-space sensitive, special care + // has to be taken, when parsing the descendant combinator, ` `, + // as it's an empty space. We have to check the previous character + // in the input, to see if it's a ` ` character. More info on how + // we deal with this in *combinator.js*. + // + combinator: function () { + var match, c = input.charAt(i); + + if (c === '>' || c === '+' || c === '~') { + i++; + while (input.charAt(i) === ' ') { i++ } + return new(tree.Combinator)(c); + } else if (c === '&') { + match = '&'; + i++; + if(input.charAt(i) === ' ') { + match = '& '; + } + while (input.charAt(i) === ' ') { i++ } + return new(tree.Combinator)(match); + } else if (c === ':' && input.charAt(i + 1) === ':') { + i += 2; + while (input.charAt(i) === ' ') { i++ } + return new(tree.Combinator)('::'); + } else if (input.charAt(i - 1) === ' ') { + return new(tree.Combinator)(" "); + } else { + return new(tree.Combinator)(null); + } + }, + + // + // A CSS Selector + // + // .class > div + h1 + // li a:hover + // + // Selectors are made out of one or more Elements, see above. + // + selector: function () { + var sel, e, elements = [], c, match; + + while (e = $(this.element)) { + c = input.charAt(i); + elements.push(e) + if (c === '{' || c === '}' || c === ';' || c === ',') { break } + } + + if (elements.length > 0) { return new(tree.Selector)(elements) } + }, + tag: function () { + return $(/^[a-zA-Z][a-zA-Z-]*[0-9]?/) || $('*'); + }, + attribute: function () { + var attr = '', key, val, op; + + if (! 
$('[')) return; + + if (key = $(/^[a-zA-Z-]+/) || $(this.entities.quoted)) { + if ((op = $(/^[|~*$^]?=/)) && + (val = $(this.entities.quoted) || $(/^[\w-]+/))) { + attr = [key, op, val.toCSS ? val.toCSS() : val].join(''); + } else { attr = key } + } + + if (! $(']')) return; + + if (attr) { return "[" + attr + "]" } + }, + + // + // The `block` rule is used by `ruleset` and `mixin.definition`. + // It's a wrapper around the `primary` rule, with added `{}`. + // + block: function () { + var content; + + if ($('{') && (content = $(this.primary)) && $('}')) { + return content; + } + }, + + // + // div, .class, body > p {...} + // + ruleset: function () { + var selectors = [], s, rules, match; + save(); + + if (match = /^([.#:% \w-]+)[\s\n]*\{/.exec(chunks[j])) { + i += match[0].length - 1; + selectors = [new(tree.Selector)([new(tree.Element)(null, match[1])])]; + } else { + while (s = $(this.selector)) { + selectors.push(s); + $(this.comment); + if (! $(',')) { break } + $(this.comment); + } + } + + if (selectors.length > 0 && (rules = $(this.block))) { + return new(tree.Ruleset)(selectors, rules); + } else { + // Backtrack + furthest = i; + restore(); + } + }, + rule: function () { + var name, value, c = input.charAt(i), important, match; + save(); + + if (c === '.' 
|| c === '#' || c === '&') { return } + + if (name = $(this.variable) || $(this.property)) { + if ((name.charAt(0) != '@') && (match = /^([^@+\/'"*`(;{}-]*);/.exec(chunks[j]))) { + i += match[0].length - 1; + value = new(tree.Anonymous)(match[1]); + } else if (name === "font") { + value = $(this.font); + } else { + value = $(this.value); + } + important = $(this.important); + + if (value && $(this.end)) { + return new(tree.Rule)(name, value, important, memo); + } else { + furthest = i; + restore(); + } + } + }, + + // + // An @import directive + // + // @import "lib"; + // + // Depending on our environemnt, importing is done differently: + // In the browser, it's an XHR request, in Node, it would be a + // file-system operation. The function used for importing is + // stored in `import`, which we pass to the Import constructor. + // + "import": function () { + var path; + if ($(/^@import\s+/) && + (path = $(this.entities.quoted) || $(this.entities.url)) && + $(';')) { + return new(tree.Import)(path, imports); + } + }, + + // + // A CSS Directive + // + // @charset "utf-8"; + // + directive: function () { + var name, value, rules, types; + + if (input.charAt(i) !== '@') return; + + if (value = $(this['import'])) { + return value; + } else if (name = $(/^@media|@page/) || $(/^@(?:-webkit-)?keyframes/)) { + types = ($(/^[^{]+/) || '').trim(); + if (rules = $(this.block)) { + return new(tree.Directive)(name + " " + types, rules); + } + } else if (name = $(/^@[-a-z]+/)) { + if (name === '@font-face') { + if (rules = $(this.block)) { + return new(tree.Directive)(name, rules); + } + } else if ((value = $(this.entity)) && $(';')) { + return new(tree.Directive)(name, value); + } + } + }, + font: function () { + var value = [], expression = [], weight, shorthand, font, e; + + while (e = $(this.shorthand) || $(this.entity)) { + expression.push(e); + } + value.push(new(tree.Expression)(expression)); + + if ($(',')) { + while (e = $(this.expression)) { + value.push(e); + if (! 
$(',')) { break } + } + } + return new(tree.Value)(value); + }, + + // + // A Value is a comma-delimited list of Expressions + // + // font-family: Baskerville, Georgia, serif; + // + // In a Rule, a Value represents everything after the `:`, + // and before the `;`. + // + value: function () { + var e, expressions = [], important; + + while (e = $(this.expression)) { + expressions.push(e); + if (! $(',')) { break } + } + + if (expressions.length > 0) { + return new(tree.Value)(expressions); + } + }, + important: function () { + if (input.charAt(i) === '!') { + return $(/^! *important/); + } + }, + sub: function () { + var e; + + if ($('(') && (e = $(this.expression)) && $(')')) { + return e; + } + }, + multiplication: function () { + var m, a, op, operation; + if (m = $(this.operand)) { + while ((op = ($('/') || $('*'))) && (a = $(this.operand))) { + operation = new(tree.Operation)(op, [operation || m, a]); + } + return operation || m; + } + }, + addition: function () { + var m, a, op, operation; + if (m = $(this.multiplication)) { + while ((op = $(/^[-+]\s+/) || (input.charAt(i - 1) != ' ' && ($('+') || $('-')))) && + (a = $(this.multiplication))) { + operation = new(tree.Operation)(op, [operation || m, a]); + } + return operation || m; + } + }, + + // + // An operand is anything that can be part of an operation, + // such as a Color, or a Variable + // + operand: function () { + var negate, p = input.charAt(i + 1); + + if (input.charAt(i) === '-' && (p === '@' || p === '(')) { negate = $('-') } + var o = $(this.sub) || $(this.entities.dimension) || + $(this.entities.color) || $(this.entities.variable) || + $(this.entities.call); + return negate ? new(tree.Operation)('*', [new(tree.Dimension)(-1), o]) + : o; + }, + + // + // Expressions either represent mathematical operations, + // or white-space delimited Entities. 
+ // + // 1px solid black + // @var * 2 + // + expression: function () { + var e, delim, entities = [], d; + + while (e = $(this.addition) || $(this.entity)) { + entities.push(e); + } + if (entities.length > 0) { + return new(tree.Expression)(entities); + } + }, + property: function () { + var name; + + if (name = $(/^(\*?-?[-a-z_0-9]+)\s*:/)) { + return name[1]; + } + } + } + }; +}; + +if (typeof(window) !== 'undefined') { + // + // Used by `@import` directives + // + less.Parser.importer = function (path, paths, callback, env) { + if (path.charAt(0) !== '/' && paths.length > 0) { + path = paths[0] + path; + } + // We pass `true` as 3rd argument, to force the reload of the import. + // This is so we can get the syntax tree as opposed to just the CSS output, + // as we need this to evaluate the current stylesheet. + loadStyleSheet({ href: path, title: path, type: env.mime }, callback, true); + }; +} + +(function (tree) { + +tree.functions = { + rgb: function (r, g, b) { + return this.rgba(r, g, b, 1.0); + }, + rgba: function (r, g, b, a) { + var rgb = [r, g, b].map(function (c) { return number(c) }), + a = number(a); + return new(tree.Color)(rgb, a); + }, + hsl: function (h, s, l) { + return this.hsla(h, s, l, 1.0); + }, + hsla: function (h, s, l, a) { + h = (number(h) % 360) / 360; + s = number(s); l = number(l); a = number(a); + + var m2 = l <= 0.5 ? l * (s + 1) : l + s - l * s; + var m1 = l * 2 - m2; + + return this.rgba(hue(h + 1/3) * 255, + hue(h) * 255, + hue(h - 1/3) * 255, + a); + + function hue(h) { + h = h < 0 ? h + 1 : (h > 1 ? 
h - 1 : h); + if (h * 6 < 1) return m1 + (m2 - m1) * h * 6; + else if (h * 2 < 1) return m2; + else if (h * 3 < 2) return m1 + (m2 - m1) * (2/3 - h) * 6; + else return m1; + } + }, + hue: function (color) { + return new(tree.Dimension)(Math.round(color.toHSL().h)); + }, + saturation: function (color) { + return new(tree.Dimension)(Math.round(color.toHSL().s * 100), '%'); + }, + lightness: function (color) { + return new(tree.Dimension)(Math.round(color.toHSL().l * 100), '%'); + }, + alpha: function (color) { + return new(tree.Dimension)(color.toHSL().a); + }, + saturate: function (color, amount) { + var hsl = color.toHSL(); + + hsl.s += amount.value / 100; + hsl.s = clamp(hsl.s); + return hsla(hsl); + }, + desaturate: function (color, amount) { + var hsl = color.toHSL(); + + hsl.s -= amount.value / 100; + hsl.s = clamp(hsl.s); + return hsla(hsl); + }, + lighten: function (color, amount) { + var hsl = color.toHSL(); + + hsl.l += amount.value / 100; + hsl.l = clamp(hsl.l); + return hsla(hsl); + }, + darken: function (color, amount) { + var hsl = color.toHSL(); + + hsl.l -= amount.value / 100; + hsl.l = clamp(hsl.l); + return hsla(hsl); + }, + fadein: function (color, amount) { + var hsl = color.toHSL(); + + hsl.a += amount.value / 100; + hsl.a = clamp(hsl.a); + return hsla(hsl); + }, + fadeout: function (color, amount) { + var hsl = color.toHSL(); + + hsl.a -= amount.value / 100; + hsl.a = clamp(hsl.a); + return hsla(hsl); + }, + spin: function (color, amount) { + var hsl = color.toHSL(); + var hue = (hsl.h + amount.value) % 360; + + hsl.h = hue < 0 ? 360 + hue : hue; + + return hsla(hsl); + }, + // + // Copyright (c) 2006-2009 Hampton Catlin, Nathan Weizenbaum, and Chris Eppstein + // http://sass-lang.com + // + mix: function (color1, color2, weight) { + var p = weight.value / 100.0; + var w = p * 2 - 1; + var a = color1.toHSL().a - color2.toHSL().a; + + var w1 = (((w * a == -1) ? 
w : (w + a) / (1 + w * a)) + 1) / 2.0; + var w2 = 1 - w1; + + var rgb = [color1.rgb[0] * w1 + color2.rgb[0] * w2, + color1.rgb[1] * w1 + color2.rgb[1] * w2, + color1.rgb[2] * w1 + color2.rgb[2] * w2]; + + var alpha = color1.alpha * p + color2.alpha * (1 - p); + + return new(tree.Color)(rgb, alpha); + }, + greyscale: function (color) { + return this.desaturate(color, new(tree.Dimension)(100)); + }, + e: function (str) { + return new(tree.Anonymous)(str instanceof tree.JavaScript ? str.evaluated : str); + }, + escape: function (str) { + return new(tree.Anonymous)(encodeURI(str.value).replace(/=/g, "%3D").replace(/:/g, "%3A").replace(/#/g, "%23").replace(/;/g, "%3B").replace(/\(/g, "%28").replace(/\)/g, "%29")); + }, + '%': function (quoted /* arg, arg, ...*/) { + var args = Array.prototype.slice.call(arguments, 1), + str = quoted.value; + + for (var i = 0; i < args.length; i++) { + str = str.replace(/%[sda]/i, function(token) { + var value = token.match(/s/i) ? args[i].value : args[i].toCSS(); + return token.match(/[A-Z]$/) ? encodeURIComponent(value) : value; + }); + } + str = str.replace(/%%/g, '%'); + return new(tree.Quoted)('"' + str + '"', str); + }, + round: function (n) { + if (n instanceof tree.Dimension) { + return new(tree.Dimension)(Math.round(number(n)), n.unit); + } else if (typeof(n) === 'number') { + return Math.round(n); + } else { + throw { + error: "RuntimeError", + message: "math functions take numbers as parameters" + }; + } + } +}; + +function hsla(hsla) { + return tree.functions.hsla(hsla.h, hsla.s, hsla.l, hsla.a); +} + +function number(n) { + if (n instanceof tree.Dimension) { + return parseFloat(n.unit == '%' ? 
n.value / 100 : n.value); + } else if (typeof(n) === 'number') { + return n; + } else { + throw { + error: "RuntimeError", + message: "color functions take numbers as parameters" + }; + } +} + +function clamp(val) { + return Math.min(1, Math.max(0, val)); +} + +})(require('less/tree')); +(function (tree) { + +tree.Alpha = function (val) { + this.value = val; +}; +tree.Alpha.prototype = { + toCSS: function () { + return "alpha(opacity=" + + (this.value.toCSS ? this.value.toCSS() : this.value) + ")"; + }, + eval: function (env) { + if (this.value.eval) { this.value = this.value.eval(env) } + return this; + } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Anonymous = function (string) { + this.value = string.value || string; +}; +tree.Anonymous.prototype = { + toCSS: function () { + return this.value; + }, + eval: function () { return this } +}; + +})(require('less/tree')); +(function (tree) { + +// +// A function call node. +// +tree.Call = function (name, args, index) { + this.name = name; + this.args = args; + this.index = index; +}; +tree.Call.prototype = { + // + // When evaluating a function call, + // we either find the function in `tree.functions` [1], + // in which case we call it, passing the evaluated arguments, + // or we simply print it out as it appeared originally [2]. + // + // The *functions.js* file contains the built-in functions. + // + // The reason why we evaluate the arguments, is in the case where + // we try to pass a variable to a function, like: `saturate(@color)`. + // The function should receive the value, not the variable. + // + eval: function (env) { + var args = this.args.map(function (a) { return a.eval(env) }); + + if (this.name in tree.functions) { // 1. + try { + return tree.functions[this.name].apply(tree.functions, args); + } catch (e) { + throw { message: "error evaluating function `" + this.name + "`", + index: this.index }; + } + } else { // 2. 
+ return new(tree.Anonymous)(this.name + + "(" + args.map(function (a) { return a.toCSS() }).join(', ') + ")"); + } + }, + + toCSS: function (env) { + return this.eval(env).toCSS(); + } +}; + +})(require('less/tree')); +(function (tree) { +// +// RGB Colors - #ff0014, #eee +// +tree.Color = function (rgb, a) { + // + // The end goal here, is to parse the arguments + // into an integer triplet, such as `128, 255, 0` + // + // This facilitates operations and conversions. + // + if (Array.isArray(rgb)) { + this.rgb = rgb; + } else if (rgb.length == 6) { + this.rgb = rgb.match(/.{2}/g).map(function (c) { + return parseInt(c, 16); + }); + } else if (rgb.length == 8) { + this.alpha = parseInt(rgb.substring(0,2), 16) / 255.0; + this.rgb = rgb.substr(2).match(/.{2}/g).map(function (c) { + return parseInt(c, 16); + }); + } else { + this.rgb = rgb.split('').map(function (c) { + return parseInt(c + c, 16); + }); + } + this.alpha = typeof(a) === 'number' ? a : 1; +}; +tree.Color.prototype = { + eval: function () { return this }, + + // + // If we have some transparency, the only way to represent it + // is via `rgba`. Otherwise, we use the hex representation, + // which has better compatibility with older browsers. + // Values are capped between `0` and `255`, rounded and zero-padded. + // + toCSS: function () { + if (this.alpha < 1.0) { + return "rgba(" + this.rgb.map(function (c) { + return Math.round(c); + }).concat(this.alpha).join(', ') + ")"; + } else { + return '#' + this.rgb.map(function (i) { + i = Math.round(i); + i = (i > 255 ? 255 : (i < 0 ? 0 : i)).toString(16); + return i.length === 1 ? '0' + i : i; + }).join(''); + } + }, + + // + // Operations have to be done per-channel, if not, + // channels will spill onto each other. Once we have + // our result, in the form of an integer triplet, + // we create a new Color node to hold the result. + // + operate: function (op, other) { + var result = []; + + if (! 
(other instanceof tree.Color)) { + other = other.toColor(); + } + + for (var c = 0; c < 3; c++) { + result[c] = tree.operate(op, this.rgb[c], other.rgb[c]); + } + return new(tree.Color)(result, this.alpha + other.alpha); + }, + + toHSL: function () { + var r = this.rgb[0] / 255, + g = this.rgb[1] / 255, + b = this.rgb[2] / 255, + a = this.alpha; + + var max = Math.max(r, g, b), min = Math.min(r, g, b); + var h, s, l = (max + min) / 2, d = max - min; + + if (max === min) { + h = s = 0; + } else { + s = l > 0.5 ? d / (2 - max - min) : d / (max + min); + + switch (max) { + case r: h = (g - b) / d + (g < b ? 6 : 0); break; + case g: h = (b - r) / d + 2; break; + case b: h = (r - g) / d + 4; break; + } + h /= 6; + } + return { h: h * 360, s: s, l: l, a: a }; + } +}; + + +})(require('less/tree')); +(function (tree) { + +tree.Comment = function (value, silent) { + this.value = value; + this.silent = !!silent; +}; +tree.Comment.prototype = { + toCSS: function (env) { + return env.compress ? '' : this.value; + }, + eval: function () { return this } +}; + +})(require('less/tree')); +(function (tree) { + +// +// A number with a unit +// +tree.Dimension = function (value, unit) { + this.value = parseFloat(value); + this.unit = unit || null; +}; + +tree.Dimension.prototype = { + eval: function () { return this }, + toColor: function () { + return new(tree.Color)([this.value, this.value, this.value]); + }, + toCSS: function () { + var css = this.value + this.unit; + return css; + }, + + // In an operation between two Dimensions, + // we default to the first Dimension's unit, + // so `1px + 2em` will yield `3px`. + // In the future, we could implement some unit + // conversions such that `100cm + 10mm` would yield + // `101cm`. 
+ operate: function (op, other) { + return new(tree.Dimension) + (tree.operate(op, this.value, other.value), + this.unit || other.unit); + } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Directive = function (name, value) { + this.name = name; + if (Array.isArray(value)) { + this.ruleset = new(tree.Ruleset)([], value); + } else { + this.value = value; + } +}; +tree.Directive.prototype = { + toCSS: function (ctx, env) { + if (this.ruleset) { + this.ruleset.root = true; + return this.name + (env.compress ? '{' : ' {\n ') + + this.ruleset.toCSS(ctx, env).trim().replace(/\n/g, '\n ') + + (env.compress ? '}': '\n}\n'); + } else { + return this.name + ' ' + this.value.toCSS() + ';\n'; + } + }, + eval: function (env) { + env.frames.unshift(this); + this.ruleset = this.ruleset && this.ruleset.eval(env); + env.frames.shift(); + return this; + }, + variable: function (name) { return tree.Ruleset.prototype.variable.call(this.ruleset, name) }, + find: function () { return tree.Ruleset.prototype.find.apply(this.ruleset, arguments) }, + rulesets: function () { return tree.Ruleset.prototype.rulesets.apply(this.ruleset) } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Element = function (combinator, value) { + this.combinator = combinator instanceof tree.Combinator ? + combinator : new(tree.Combinator)(combinator); + this.value = value ? value.trim() : ""; +}; +tree.Element.prototype.toCSS = function (env) { + return this.combinator.toCSS(env || {}) + this.value; +}; + +tree.Combinator = function (value) { + if (value === ' ') { + this.value = ' '; + } else if (value === '& ') { + this.value = '& '; + } else { + this.value = value ? value.trim() : ""; + } +}; +tree.Combinator.prototype.toCSS = function (env) { + return { + '' : '', + ' ' : ' ', + '&' : '', + '& ' : ' ', + ':' : ' :', + '::': '::', + '+' : env.compress ? '+' : ' + ', + '~' : env.compress ? '~' : ' ~ ', + '>' : env.compress ? 
'>' : ' > ' + }[this.value]; +}; + +})(require('less/tree')); +(function (tree) { + +tree.Expression = function (value) { this.value = value }; +tree.Expression.prototype = { + eval: function (env) { + if (this.value.length > 1) { + return new(tree.Expression)(this.value.map(function (e) { + return e.eval(env); + })); + } else if (this.value.length === 1) { + return this.value[0].eval(env); + } else { + return this; + } + }, + toCSS: function (env) { + return this.value.map(function (e) { + return e.toCSS(env); + }).join(' '); + } +}; + +})(require('less/tree')); +(function (tree) { +// +// CSS @import node +// +// The general strategy here is that we don't want to wait +// for the parsing to be completed, before we start importing +// the file. That's because in the context of a browser, +// most of the time will be spent waiting for the server to respond. +// +// On creation, we push the import path to our import queue, though +// `import,push`, we also pass it a callback, which it'll call once +// the file has been fetched, and parsed. +// +tree.Import = function (path, imports) { + var that = this; + + this._path = path; + + // The '.less' extension is optional + if (path instanceof tree.Quoted) { + this.path = /\.(le?|c)ss$/.test(path.value) ? path.value : path.value + '.less'; + } else { + this.path = path.value.value || path.value; + } + + this.css = /css$/.test(this.path); + + // Only pre-compile .less files + if (! this.css) { + imports.push(this.path, function (root) { + if (! root) { + throw new(Error)("Error parsing " + that.path); + } + that.root = root; + }); + } +}; + +// +// The actual import node doesn't return anything, when converted to CSS. +// The reason is that it's used at the evaluation stage, so that the rules +// it imports can be treated like any other rules. +// +// In `eval`, we make sure all Import nodes get evaluated, recursively, so +// we end up with a flat structure, which can easily be imported in the parent +// ruleset. 
+// +tree.Import.prototype = { + toCSS: function () { + if (this.css) { + return "@import " + this._path.toCSS() + ';\n'; + } else { + return ""; + } + }, + eval: function (env) { + var ruleset; + + if (this.css) { + return this; + } else { + ruleset = new(tree.Ruleset)(null, this.root.rules.slice(0)); + + for (var i = 0; i < ruleset.rules.length; i++) { + if (ruleset.rules[i] instanceof tree.Import) { + Array.prototype + .splice + .apply(ruleset.rules, + [i, 1].concat(ruleset.rules[i].eval(env))); + } + } + return ruleset.rules; + } + } +}; + +})(require('less/tree')); +(function (tree) { + +tree.JavaScript = function (string, index, escaped) { + this.escaped = escaped; + this.expression = string; + this.index = index; +}; +tree.JavaScript.prototype = { + eval: function (env) { + var result, + that = this, + context = {}; + + var expression = this.expression.replace(/@\{([\w-]+)\}/g, function (_, name) { + return tree.jsify(new(tree.Variable)('@' + name, that.index).eval(env)); + }); + + try { + expression = new(Function)('return (' + expression + ')'); + } catch (e) { + throw { message: "JavaScript evaluation error: `" + expression + "`" , + index: this.index }; + } + + for (var k in env.frames[0].variables()) { + context[k.slice(1)] = { + value: env.frames[0].variables()[k].value, + toJS: function () { + return this.value.eval(env).toCSS(); + } + }; + } + + try { + result = expression.call(context); + } catch (e) { + throw { message: "JavaScript evaluation error: '" + e.name + ': ' + e.message + "'" , + index: this.index }; + } + if (typeof(result) === 'string') { + return new(tree.Quoted)('"' + result + '"', result, this.escaped, this.index); + } else if (Array.isArray(result)) { + return new(tree.Anonymous)(result.join(', ')); + } else { + return new(tree.Anonymous)(result); + } + } +}; + +})(require('less/tree')); + +(function (tree) { + +tree.Keyword = function (value) { this.value = value }; +tree.Keyword.prototype = { + eval: function () { return this }, + 
toCSS: function () { return this.value } +}; + +})(require('less/tree')); +(function (tree) { + +tree.mixin = {}; +tree.mixin.Call = function (elements, args, index) { + this.selector = new(tree.Selector)(elements); + this.arguments = args; + this.index = index; +}; +tree.mixin.Call.prototype = { + eval: function (env) { + var mixins, args, rules = [], match = false; + + for (var i = 0; i < env.frames.length; i++) { + if ((mixins = env.frames[i].find(this.selector)).length > 0) { + args = this.arguments && this.arguments.map(function (a) { return a.eval(env) }); + for (var m = 0; m < mixins.length; m++) { + if (mixins[m].match(args, env)) { + try { + Array.prototype.push.apply( + rules, mixins[m].eval(env, this.arguments).rules); + match = true; + } catch (e) { + throw { message: e.message, index: e.index, stack: e.stack, call: this.index }; + } + } + } + if (match) { + return rules; + } else { + throw { message: 'No matching definition was found for `' + + this.selector.toCSS().trim() + '(' + + this.arguments.map(function (a) { + return a.toCSS(); + }).join(', ') + ")`", + index: this.index }; + } + } + } + throw { message: this.selector.toCSS().trim() + " is undefined", + index: this.index }; + } +}; + +tree.mixin.Definition = function (name, params, rules) { + this.name = name; + this.selectors = [new(tree.Selector)([new(tree.Element)(null, name)])]; + this.params = params; + this.arity = params.length; + this.rules = rules; + this._lookups = {}; + this.required = params.reduce(function (count, p) { + if (!p.name || (p.name && !p.value)) { return count + 1 } + else { return count } + }, 0); + this.parent = tree.Ruleset.prototype; + this.frames = []; +}; +tree.mixin.Definition.prototype = { + toCSS: function () { return "" }, + variable: function (name) { return this.parent.variable.call(this, name) }, + variables: function () { return this.parent.variables.call(this) }, + find: function () { return this.parent.find.apply(this, arguments) }, + rulesets: function 
() { return this.parent.rulesets.apply(this) }, + + eval: function (env, args) { + var frame = new(tree.Ruleset)(null, []), context, _arguments = []; + + for (var i = 0, val; i < this.params.length; i++) { + if (this.params[i].name) { + if (val = (args && args[i]) || this.params[i].value) { + frame.rules.unshift(new(tree.Rule)(this.params[i].name, val.eval(env))); + } else { + throw { message: "wrong number of arguments for " + this.name + + ' (' + args.length + ' for ' + this.arity + ')' }; + } + } + } + for (var i = 0; i < Math.max(this.params.length, args && args.length); i++) { + _arguments.push(args[i] || this.params[i].value); + } + frame.rules.unshift(new(tree.Rule)('@arguments', new(tree.Expression)(_arguments).eval(env))); + + return new(tree.Ruleset)(null, this.rules.slice(0)).eval({ + frames: [this, frame].concat(this.frames, env.frames) + }); + }, + match: function (args, env) { + var argsLength = (args && args.length) || 0, len; + + if (argsLength < this.required) { return false } + if ((this.required > 0) && (argsLength > this.params.length)) { return false } + + len = Math.min(argsLength, this.arity); + + for (var i = 0; i < len; i++) { + if (!this.params[i].name) { + if (args[i].eval(env).toCSS() != this.params[i].value.eval(env).toCSS()) { + return false; + } + } + } + return true; + } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Operation = function (op, operands) { + this.op = op.trim(); + this.operands = operands; +}; +tree.Operation.prototype.eval = function (env) { + var a = this.operands[0].eval(env), + b = this.operands[1].eval(env), + temp; + + if (a instanceof tree.Dimension && b instanceof tree.Color) { + if (this.op === '*' || this.op === '+') { + temp = b, b = a, a = temp; + } else { + throw { name: "OperationError", + message: "Can't substract or divide a color from a number" }; + } + } + return a.operate(this.op, b); +}; + +tree.operate = function (op, a, b) { + switch (op) { + case '+': return a + b; + case '-': 
return a - b; + case '*': return a * b; + case '/': return a / b; + } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Quoted = function (str, content, escaped, i) { + this.escaped = escaped; + this.value = content || ''; + this.quote = str.charAt(0); + this.index = i; +}; +tree.Quoted.prototype = { + toCSS: function () { + if (this.escaped) { + return this.value; + } else { + return this.quote + this.value + this.quote; + } + }, + eval: function (env) { + var that = this; + var value = this.value.replace(/`([^`]+)`/g, function (_, exp) { + return new(tree.JavaScript)(exp, that.index, true).eval(env).value; + }).replace(/@\{([\w-]+)\}/g, function (_, name) { + var v = new(tree.Variable)('@' + name, that.index).eval(env); + return v.value || v.toCSS(); + }); + return new(tree.Quoted)(this.quote + value + this.quote, value, this.escaped, this.index); + } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Rule = function (name, value, important, index) { + this.name = name; + this.value = (value instanceof tree.Value) ? value : new(tree.Value)([value]); + this.important = important ? ' ' + important.trim() : ''; + this.index = index; + + if (name.charAt(0) === '@') { + this.variable = true; + } else { this.variable = false } +}; +tree.Rule.prototype.toCSS = function (env) { + if (this.variable) { return "" } + else { + return this.name + (env.compress ? 
':' : ': ') + + this.value.toCSS(env) + + this.important + ";"; + } +}; + +tree.Rule.prototype.eval = function (context) { + return new(tree.Rule)(this.name, this.value.eval(context), this.important, this.index); +}; + +tree.Shorthand = function (a, b) { + this.a = a; + this.b = b; +}; + +tree.Shorthand.prototype = { + toCSS: function (env) { + return this.a.toCSS(env) + "/" + this.b.toCSS(env); + }, + eval: function () { return this } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Ruleset = function (selectors, rules) { + this.selectors = selectors; + this.rules = rules; + this._lookups = {}; +}; +tree.Ruleset.prototype = { + eval: function (env) { + var ruleset = new(tree.Ruleset)(this.selectors, this.rules.slice(0)); + + ruleset.root = this.root; + + // push the current ruleset to the frames stack + env.frames.unshift(ruleset); + + // Evaluate imports + if (ruleset.root) { + for (var i = 0; i < ruleset.rules.length; i++) { + if (ruleset.rules[i] instanceof tree.Import) { + Array.prototype.splice + .apply(ruleset.rules, [i, 1].concat(ruleset.rules[i].eval(env))); + } + } + } + + // Store the frames around mixin definitions, + // so they can be evaluated like closures when the time comes. + for (var i = 0; i < ruleset.rules.length; i++) { + if (ruleset.rules[i] instanceof tree.mixin.Definition) { + ruleset.rules[i].frames = env.frames.slice(0); + } + } + + // Evaluate mixin calls. + for (var i = 0; i < ruleset.rules.length; i++) { + if (ruleset.rules[i] instanceof tree.mixin.Call) { + Array.prototype.splice + .apply(ruleset.rules, [i, 1].concat(ruleset.rules[i].eval(env))); + } + } + + // Evaluate everything else + for (var i = 0, rule; i < ruleset.rules.length; i++) { + rule = ruleset.rules[i]; + + if (! (rule instanceof tree.mixin.Definition)) { + ruleset.rules[i] = rule.eval ? 
rule.eval(env) : rule; + } + } + + // Pop the stack + env.frames.shift(); + + return ruleset; + }, + match: function (args) { + return !args || args.length === 0; + }, + variables: function () { + if (this._variables) { return this._variables } + else { + return this._variables = this.rules.reduce(function (hash, r) { + if (r instanceof tree.Rule && r.variable === true) { + hash[r.name] = r; + } + return hash; + }, {}); + } + }, + variable: function (name) { + return this.variables()[name]; + }, + rulesets: function () { + if (this._rulesets) { return this._rulesets } + else { + return this._rulesets = this.rules.filter(function (r) { + return (r instanceof tree.Ruleset) || (r instanceof tree.mixin.Definition); + }); + } + }, + find: function (selector, self) { + self = self || this; + var rules = [], rule, match, + key = selector.toCSS(); + + if (key in this._lookups) { return this._lookups[key] } + + this.rulesets().forEach(function (rule) { + if (rule !== self) { + for (var j = 0; j < rule.selectors.length; j++) { + if (match = selector.match(rule.selectors[j])) { + if (selector.elements.length > 1) { + Array.prototype.push.apply(rules, rule.find( + new(tree.Selector)(selector.elements.slice(1)), self)); + } else { + rules.push(rule); + } + break; + } + } + } + }); + return this._lookups[key] = rules; + }, + // + // Entry point for code generation + // + // `context` holds an array of arrays. + // + toCSS: function (context, env) { + var css = [], // The CSS output + rules = [], // node.Rule instances + rulesets = [], // node.Ruleset instances + paths = [], // Current selectors + selector, // The fully rendered selector + rule; + + if (! 
this.root) { + if (context.length === 0) { + paths = this.selectors.map(function (s) { return [s] }); + } else { + this.joinSelectors( paths, context, this.selectors ); + } + } + + // Compile rules and rulesets + for (var i = 0; i < this.rules.length; i++) { + rule = this.rules[i]; + + if (rule.rules || (rule instanceof tree.Directive)) { + rulesets.push(rule.toCSS(paths, env)); + } else if (rule instanceof tree.Comment) { + if (!rule.silent) { + if (this.root) { + rulesets.push(rule.toCSS(env)); + } else { + rules.push(rule.toCSS(env)); + } + } + } else { + if (rule.toCSS && !rule.variable) { + rules.push(rule.toCSS(env)); + } else if (rule.value && !rule.variable) { + rules.push(rule.value.toString()); + } + } + } + + rulesets = rulesets.join(''); + + // If this is the root node, we don't render + // a selector, or {}. + // Otherwise, only output if this ruleset has rules. + if (this.root) { + css.push(rules.join(env.compress ? '' : '\n')); + } else { + if (rules.length > 0) { + selector = paths.map(function (p) { + return p.map(function (s) { + return s.toCSS(env); + }).join('').trim(); + }).join(env.compress ? ',' : (paths.length > 3 ? ',\n' : ', ')); + css.push(selector, + (env.compress ? '{' : ' {\n ') + + rules.join(env.compress ? '' : '\n ') + + (env.compress ? '}' : '\n}\n')); + } + } + css.push(rulesets); + + return css.join('') + (env.compress ? '\n' : ''); + }, + + joinSelectors: function (paths, context, selectors) { + for (var s = 0; s < selectors.length; s++) { + this.joinSelector(paths, context, selectors[s]); + } + }, + + joinSelector: function (paths, context, selector) { + var before = [], after = [], beforeElements = [], + afterElements = [], hasParentSelector = false, el; + + for (var i = 0; i < selector.elements.length; i++) { + el = selector.elements[i]; + if (el.combinator.value[0] === '&') { + hasParentSelector = true; + } + if (hasParentSelector) afterElements.push(el); + else beforeElements.push(el); + } + + if (! 
hasParentSelector) { + afterElements = beforeElements; + beforeElements = []; + } + + if (beforeElements.length > 0) { + before.push(new(tree.Selector)(beforeElements)); + } + + if (afterElements.length > 0) { + after.push(new(tree.Selector)(afterElements)); + } + + for (var c = 0; c < context.length; c++) { + paths.push(before.concat(context[c]).concat(after)); + } + } +}; +})(require('less/tree')); +(function (tree) { + +tree.Selector = function (elements) { + this.elements = elements; + if (this.elements[0].combinator.value === "") { + this.elements[0].combinator.value = ' '; + } +}; +tree.Selector.prototype.match = function (other) { + if (this.elements[0].value === other.elements[0].value) { + return true; + } else { + return false; + } +}; +tree.Selector.prototype.toCSS = function (env) { + if (this._css) { return this._css } + + return this._css = this.elements.map(function (e) { + if (typeof(e) === 'string') { + return ' ' + e.trim(); + } else { + return e.toCSS(env); + } + }).join(''); +}; + +})(require('less/tree')); +(function (tree) { + +tree.URL = function (val, paths) { + if (val.data) { + this.attrs = val; + } else { + // Add the base path if the URL is relative and we are in the browser + if (!/^(?:https?:\/|file:\/|data:\/)?\//.test(val.value) && paths.length > 0 && typeof(window) !== 'undefined') { + val.value = paths[0] + (val.value.charAt(0) === '/' ? val.value.slice(1) : val.value); + } + this.value = val; + this.paths = paths; + } +}; +tree.URL.prototype = { + toCSS: function () { + return "url(" + (this.attrs ? 'data:' + this.attrs.mime + this.attrs.charset + this.attrs.base64 + this.attrs.data + : this.value.toCSS()) + ")"; + }, + eval: function (ctx) { + return this.attrs ? 
this : new(tree.URL)(this.value.eval(ctx), this.paths); + } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Value = function (value) { + this.value = value; + this.is = 'value'; +}; +tree.Value.prototype = { + eval: function (env) { + if (this.value.length === 1) { + return this.value[0].eval(env); + } else { + return new(tree.Value)(this.value.map(function (v) { + return v.eval(env); + })); + } + }, + toCSS: function (env) { + return this.value.map(function (e) { + return e.toCSS(env); + }).join(env.compress ? ',' : ', '); + } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Variable = function (name, index) { this.name = name, this.index = index }; +tree.Variable.prototype = { + eval: function (env) { + var variable, v, name = this.name; + + if (name.indexOf('@@') == 0) { + name = '@' + new(tree.Variable)(name.slice(1)).eval(env).value; + } + + if (variable = tree.find(env.frames, function (frame) { + if (v = frame.variable(name)) { + return v.value.eval(env); + } + })) { return variable } + else { + throw { message: "variable " + name + " is undefined", + index: this.index }; + } + } +}; + +})(require('less/tree')); +require('less/tree').find = function (obj, fun) { + for (var i = 0, r; i < obj.length; i++) { + if (r = fun.call(obj, obj[i])) { return r } + } + return null; +}; +require('less/tree').jsify = function (obj) { + if (Array.isArray(obj.value) && (obj.value.length > 1)) { + return '[' + obj.value.map(function (v) { return v.toCSS(false) }).join(', ') + ']'; + } else { + return obj.toCSS(false); + } +}; +// +// browser.js - client-side engine +// + +var isFileProtocol = (location.protocol === 'file:' || + location.protocol === 'chrome:' || + location.protocol === 'chrome-extension:' || + location.protocol === 'resource:'); + +less.env = less.env || (location.hostname == '127.0.0.1' || + location.hostname == '0.0.0.0' || + location.hostname == 'localhost' || + location.port.length > 0 || + isFileProtocol ? 
'development' + : 'production'); + +// Load styles asynchronously (default: false) +// +// This is set to `false` by default, so that the body +// doesn't start loading before the stylesheets are parsed. +// Setting this to `true` can result in flickering. +// +less.async = false; + +// Interval between watch polls +less.poll = less.poll || (isFileProtocol ? 1000 : 1500); + +// +// Watch mode +// +less.watch = function () { return this.watchMode = true }; +less.unwatch = function () { return this.watchMode = false }; + +if (less.env === 'development') { + less.optimization = 0; + + if (/!watch/.test(location.hash)) { + less.watch(); + } + less.watchTimer = setInterval(function () { + if (less.watchMode) { + loadStyleSheets(function (root, sheet, env) { + if (root) { + createCSS(root.toCSS(), sheet, env.lastModified); + } + }); + } + }, less.poll); +} else { + less.optimization = 3; +} + +var cache; + +try { + cache = (typeof(window.localStorage) === 'undefined') ? null : window.localStorage; +} catch (_) { + cache = null; +} + +// +// Get all tags with the 'rel' attribute set to "stylesheet/less" +// +var links = document.getElementsByTagName('link'); +var typePattern = /^text\/(x-)?less$/; + +less.sheets = []; + +for (var i = 0; i < links.length; i++) { + if (links[i].rel === 'stylesheet/less' || (links[i].rel.match(/stylesheet/) && + (links[i].type.match(typePattern)))) { + less.sheets.push(links[i]); + } +} + + +less.refresh = function (reload) { + var startTime, endTime; + startTime = endTime = new(Date); + + loadStyleSheets(function (root, sheet, env) { + if (env.local) { + log("loading " + sheet.href + " from cache."); + } else { + log("parsed " + sheet.href + " successfully."); + createCSS(root.toCSS(), sheet, env.lastModified); + } + log("css for " + sheet.href + " generated in " + (new(Date) - endTime) + 'ms'); + (env.remaining === 0) && log("css generated in " + (new(Date) - startTime) + 'ms'); + endTime = new(Date); + }, reload); + + loadStyles(); +}; 
+less.refreshStyles = loadStyles; + +less.refresh(less.env === 'development'); + +function loadStyles() { + var styles = document.getElementsByTagName('style'); + for (var i = 0; i < styles.length; i++) { + if (styles[i].type.match(typePattern)) { + new(less.Parser)().parse(styles[i].innerHTML || '', function (e, tree) { + styles[i].type = 'text/css'; + styles[i].innerHTML = tree.toCSS(); + }); + } + } +} + +function loadStyleSheets(callback, reload) { + for (var i = 0; i < less.sheets.length; i++) { + loadStyleSheet(less.sheets[i], callback, reload, less.sheets.length - (i + 1)); + } +} + +function loadStyleSheet(sheet, callback, reload, remaining) { + var url = window.location.href.replace(/[#?].*$/, ''); + var href = sheet.href.replace(/\?.*$/, ''); + var css = cache && cache.getItem(href); + var timestamp = cache && cache.getItem(href + ':timestamp'); + var styles = { css: css, timestamp: timestamp }; + + // Stylesheets in IE don't always return the full path + if (! /^(https?|file):/.test(href)) { + if (href.charAt(0) == "/") { + href = window.location.protocol + "//" + window.location.host + href; + } else { + href = url.slice(0, url.lastIndexOf('/') + 1) + href; + } + } + + xhr(sheet.href, sheet.type, function (data, lastModified) { + if (!reload && styles && lastModified && + (new(Date)(lastModified).valueOf() === + new(Date)(styles.timestamp).valueOf())) { + // Use local copy + createCSS(styles.css, sheet); + callback(null, sheet, { local: true, remaining: remaining }); + } else { + // Use remote copy (re-parse) + try { + new(less.Parser)({ + optimization: less.optimization, + paths: [href.replace(/[\w\.-]+$/, '')], + mime: sheet.type + }).parse(data, function (e, root) { + if (e) { return error(e, href) } + try { + callback(root, sheet, { local: false, lastModified: lastModified, remaining: remaining }); + removeNode(document.getElementById('less-error-message:' + extractId(href))); + } catch (e) { + error(e, href); + } + }); + } catch (e) { + error(e, 
href); + } + } + }, function (status, url) { + throw new(Error)("Couldn't load " + url + " (" + status + ")"); + }); +} + +function extractId(href) { + return href.replace(/^[a-z]+:\/\/?[^\/]+/, '' ) // Remove protocol & domain + .replace(/^\//, '' ) // Remove root / + .replace(/\?.*$/, '' ) // Remove query + .replace(/\.[^\.\/]+$/, '' ) // Remove file extension + .replace(/[^\.\w-]+/g, '-') // Replace illegal characters + .replace(/\./g, ':'); // Replace dots with colons(for valid id) +} + +function createCSS(styles, sheet, lastModified) { + var css; + + // Strip the query-string + var href = sheet.href ? sheet.href.replace(/\?.*$/, '') : ''; + + // If there is no title set, use the filename, minus the extension + var id = 'less:' + (sheet.title || extractId(href)); + + // If the stylesheet doesn't exist, create a new node + if ((css = document.getElementById(id)) === null) { + css = document.createElement('style'); + css.type = 'text/css'; + css.media = sheet.media || 'screen'; + css.id = id; + document.getElementsByTagName('head')[0].appendChild(css); + } + + if (css.styleSheet) { // IE + try { + css.styleSheet.cssText = styles; + } catch (e) { + throw new(Error)("Couldn't reassign styleSheet.cssText."); + } + } else { + (function (node) { + if (css.childNodes.length > 0) { + if (css.firstChild.nodeValue !== node.nodeValue) { + css.replaceChild(node, css.firstChild); + } + } else { + css.appendChild(node); + } + })(document.createTextNode(styles)); + } + + // Don't update the local store if the file wasn't modified + if (lastModified && cache) { + log('saving ' + href + ' to cache.'); + cache.setItem(href, styles); + cache.setItem(href + ':timestamp', lastModified); + } +} + +function xhr(url, type, callback, errback) { + var xhr = getXMLHttpRequest(); + var async = isFileProtocol ? 
false : less.async; + + if (typeof(xhr.overrideMimeType) === 'function') { + xhr.overrideMimeType('text/css'); + } + xhr.open('GET', url, async); + xhr.setRequestHeader('Accept', type || 'text/x-less, text/css; q=0.9, */*; q=0.5'); + xhr.send(null); + + if (isFileProtocol) { + if (xhr.status === 0) { + callback(xhr.responseText); + } else { + errback(xhr.status, url); + } + } else if (async) { + xhr.onreadystatechange = function () { + if (xhr.readyState == 4) { + handleResponse(xhr, callback, errback); + } + }; + } else { + handleResponse(xhr, callback, errback); + } + + function handleResponse(xhr, callback, errback) { + if (xhr.status >= 200 && xhr.status < 300) { + callback(xhr.responseText, + xhr.getResponseHeader("Last-Modified")); + } else if (typeof(errback) === 'function') { + errback(xhr.status, url); + } + } +} + +function getXMLHttpRequest() { + if (window.XMLHttpRequest) { + return new(XMLHttpRequest); + } else { + try { + return new(ActiveXObject)("MSXML2.XMLHTTP.3.0"); + } catch (e) { + log("browser doesn't support AJAX."); + return null; + } + } +} + +function removeNode(node) { + return node && node.parentNode.removeChild(node); +} + +function log(str) { + if (less.env == 'development' && typeof(console) !== "undefined") { console.log('less: ' + str) } +} + +function error(e, href) { + var id = 'less-error-message:' + extractId(href); + + var template = ['
      ', + '
    • {0}
    • ', + '
    • {current}
    • ', + '
    • {2}
    • ', + '
    '].join('\n'); + + var elem = document.createElement('div'), timer, content; + + elem.id = id; + elem.className = "less-error-message"; + + content = '

    ' + (e.message || 'There is an error in your .less file') + + '

    ' + '

    ' + href + " "; + + if (e.extract) { + content += 'on line ' + e.line + ', column ' + (e.column + 1) + ':

    ' + + template.replace(/\[(-?\d)\]/g, function (_, i) { + return (parseInt(e.line) + parseInt(i)) || ''; + }).replace(/\{(\d)\}/g, function (_, i) { + return e.extract[parseInt(i)] || ''; + }).replace(/\{current\}/, e.extract[1].slice(0, e.column) + '' + + e.extract[1].slice(e.column) + ''); + } + elem.innerHTML = content; + + // CSS for error messages + createCSS([ + '.less-error-message ul, .less-error-message li {', + 'list-style-type: none;', + 'margin-right: 15px;', + 'padding: 4px 0;', + 'margin: 0;', + '}', + '.less-error-message label {', + 'font-size: 12px;', + 'margin-right: 15px;', + 'padding: 4px 0;', + 'color: #cc7777;', + '}', + '.less-error-message pre {', + 'color: #ee4444;', + 'padding: 4px 0;', + 'margin: 0;', + 'display: inline-block;', + '}', + '.less-error-message pre.ctx {', + 'color: #dd4444;', + '}', + '.less-error-message h3 {', + 'font-size: 20px;', + 'font-weight: bold;', + 'padding: 15px 0 5px 0;', + 'margin: 0;', + '}', + '.less-error-message a {', + 'color: #10a', + '}', + '.less-error-message .error {', + 'color: red;', + 'font-weight: bold;', + 'padding-bottom: 2px;', + 'border-bottom: 1px dashed red;', + '}' + ].join('\n'), { title: 'error-message' }); + + elem.style.cssText = [ + "font-family: Arial, sans-serif", + "border: 1px solid #e00", + "background-color: #eee", + "border-radius: 5px", + "-webkit-border-radius: 5px", + "-moz-border-radius: 5px", + "color: #e00", + "padding: 15px", + "margin-bottom: 15px" + ].join(';'); + + if (less.env == 'development') { + timer = setInterval(function () { + if (document.body) { + if (document.getElementById(id)) { + document.body.replaceChild(elem, document.getElementById(id)); + } else { + document.body.insertBefore(elem, document.body.firstChild); + } + clearInterval(timer); + } + }, 10); + } +} + +})(window); diff --git a/src/dashboard/src/media/vendor/less.js/dist/less-1.1.4.min.js b/src/dashboard/src/media/vendor/less.js/dist/less-1.1.4.min.js new file mode 100644 index 
0000000000..182b526fbd --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/dist/less-1.1.4.min.js @@ -0,0 +1,16 @@ +// +// LESS - Leaner CSS v1.1.4 +// http://lesscss.org +// +// Copyright (c) 2009-2011, Alexis Sellier +// Licensed under the Apache 2.0 License. +// +// +// LESS - Leaner CSS v1.1.4 +// http://lesscss.org +// +// Copyright (c) 2009-2011, Alexis Sellier +// Licensed under the Apache 2.0 License. +// +(function(a,b){function u(a,b){var c="less-error-message:"+o(b),e=["
      ",'
    • {0}
    • ',"
    • {current}
    • ",'
    • {2}
    • ',"
    "].join("\n"),f=document.createElement("div"),g,h;f.id=c,f.className="less-error-message",h="

    "+(a.message||"There is an error in your .less file")+"

    "+'

    '+b+" ",a.extract&&(h+="on line "+a.line+", column "+(a.column+1)+":

    "+e.replace(/\[(-?\d)\]/g,function(b,c){return parseInt(a.line)+parseInt(c)||""}).replace(/\{(\d)\}/g,function(b,c){return a.extract[parseInt(c)]||""}).replace(/\{current\}/,a.extract[1].slice(0,a.column)+''+a.extract[1].slice(a.column)+"")),f.innerHTML=h,p([".less-error-message ul, .less-error-message li {","list-style-type: none;","margin-right: 15px;","padding: 4px 0;","margin: 0;","}",".less-error-message label {","font-size: 12px;","margin-right: 15px;","padding: 4px 0;","color: #cc7777;","}",".less-error-message pre {","color: #ee4444;","padding: 4px 0;","margin: 0;","display: inline-block;","}",".less-error-message pre.ctx {","color: #dd4444;","}",".less-error-message h3 {","font-size: 20px;","font-weight: bold;","padding: 15px 0 5px 0;","margin: 0;","}",".less-error-message a {","color: #10a","}",".less-error-message .error {","color: red;","font-weight: bold;","padding-bottom: 2px;","border-bottom: 1px dashed red;","}"].join("\n"),{title:"error-message"}),f.style.cssText=["font-family: Arial, sans-serif","border: 1px solid #e00","background-color: #eee","border-radius: 5px","-webkit-border-radius: 5px","-moz-border-radius: 5px","color: #e00","padding: 15px","margin-bottom: 15px"].join(";"),d.env=="development"&&(g=setInterval(function(){document.body&&(document.getElementById(c)?document.body.replaceChild(f,document.getElementById(c)):document.body.insertBefore(f,document.body.firstChild),clearInterval(g))},10))}function t(a){d.env=="development"&&typeof console!="undefined"&&console.log("less: "+a)}function s(a){return a&&a.parentNode.removeChild(a)}function r(){if(a.XMLHttpRequest)return new XMLHttpRequest;try{return new ActiveXObject("MSXML2.XMLHTTP.3.0")}catch(b){t("browser doesn't support AJAX.");return null}}function q(a,b,c,e){function i(b,c,d){b.status>=200&&b.status<300?c(b.responseText,b.getResponseHeader("Last-Modified")):typeof d=="function"&&d(b.status,a)}var g=r(),h=f?!1:d.async;typeof 
g.overrideMimeType=="function"&&g.overrideMimeType("text/css"),g.open("GET",a,h),g.setRequestHeader("Accept",b||"text/x-less, text/css; q=0.9, */*; q=0.5"),g.send(null),f?g.status===0?c(g.responseText):e(g.status,a):h?g.onreadystatechange=function(){g.readyState==4&&i(g,c,e)}:i(g,c,e)}function p(a,b,c){var d,e=b.href?b.href.replace(/\?.*$/,""):"",f="less:"+(b.title||o(e));(d=document.getElementById(f))===null&&(d=document.createElement("style"),d.type="text/css",d.media=b.media||"screen",d.id=f,document.getElementsByTagName("head")[0].appendChild(d));if(d.styleSheet)try{d.styleSheet.cssText=a}catch(h){throw new Error("Couldn't reassign styleSheet.cssText.")}else(function(a){d.childNodes.length>0?d.firstChild.nodeValue!==a.nodeValue&&d.replaceChild(a,d.firstChild):d.appendChild(a)})(document.createTextNode(a));c&&g&&(t("saving "+e+" to cache."),g.setItem(e,a),g.setItem(e+":timestamp",c))}function o(a){return a.replace(/^[a-z]+:\/\/?[^\/]+/,"").replace(/^\//,"").replace(/\?.*$/,"").replace(/\.[^\.\/]+$/,"").replace(/[^\.\w-]+/g,"-").replace(/\./g,":")}function n(b,c,e,f){var h=a.location.href.replace(/[#?].*$/,""),i=b.href.replace(/\?.*$/,""),j=g&&g.getItem(i),k=g&&g.getItem(i+":timestamp"),l={css:j,timestamp:k};/^(https?|file):/.test(i)||(i.charAt(0)=="/"?i=a.location.protocol+"//"+a.location.host+i:i=h.slice(0,h.lastIndexOf("/")+1)+i),q(b.href,b.type,function(a,g){if(!e&&l&&g&&(new Date(g)).valueOf()===(new Date(l.timestamp)).valueOf())p(l.css,b),c(null,b,{local:!0,remaining:f});else try{(new d.Parser({optimization:d.optimization,paths:[i.replace(/[\w\.-]+$/,"")],mime:b.type})).parse(a,function(a,d){if(a)return u(a,i);try{c(d,b,{local:!1,lastModified:g,remaining:f}),s(document.getElementById("less-error-message:"+o(i)))}catch(a){u(a,i)}})}catch(h){u(h,i)}},function(a,b){throw new Error("Couldn't load "+b+" ("+a+")")})}function m(a,b){for(var c=0;c>>0;for(var d=0;d>>0,c=Array(b),d=arguments[1];for(var e=0;e>>0,c=0;if(b===0&&arguments.length===1)throw new 
TypeError;if(arguments.length>=2)var d=arguments[1];else do{if(c in this){d=this[c++];break}if(++c>=b)throw new TypeError}while(!0);for(;c=b)return-1;c<0&&(c+=b);for(;ck&&(j[f]=j[f].slice(c-k),k=c)}function q(){j[f]=g,c=h,k=c}function p(){g=j[f],h=c,k=c}var b,c,f,g,h,i,j,k,l,m=this,n=function(){},o=this.imports={paths:a&&a.paths||[],queue:[],files:{},mime:a&&a.mime,push:function(b,c){var e=this;this.queue.push(b),d.Parser.importer(b,this.paths,function(a){e.queue.splice(e.queue.indexOf(b),1),e.files[b]=a,c(a),e.queue.length===0&&n()},a)}};this.env=a=a||{},this.optimization="optimization"in this.env?this.env.optimization:1,this.env.filename=this.env.filename||null;return l={imports:o,parse:function(d,g){var h,l,m,o,p,q,r=[],t,u=null;c=f=k=i=0,j=[],b=d.replace(/\r\n/g,"\n"),j=function(c){var d=0,e=/[^"'`\{\}\/\(\)]+/g,f=/\/\*(?:[^*]|\*+[^\/*])*\*+\/|\/\/.*/g,g=0,h,i=c[0],j,k;for(var l=0,m,n;l0)throw{type:"Syntax",message:"Missing closing `}`",filename:a.filename};return c.map(function(a){return a.join("")})}([[]]),h=new e.Ruleset([],s(this.parsers.primary)),h.root=!0,h.toCSS=function(c){var d,f,g;return function(g,h){function n(a){return a?(b.slice(0,a).match(/\n/g)||"").length:null}var i=[];g=g||{},typeof h=="object"&&!Array.isArray(h)&&(h=Object.keys(h).map(function(a){var b=h[a];b instanceof e.Value||(b instanceof e.Expression||(b=new e.Expression([b])),b=new e.Value([b]));return new e.Rule("@"+a,b,!1,0)}),i=[new e.Ruleset(null,h)]);try{var j=c.call(this,{frames:i}).toCSS([],{compress:g.compress||!1})}catch(k){f=b.split("\n"),d=n(k.index);for(var l=k.index,m=-1;l>=0&&b.charAt(l)!=="\n";l--)m++;throw{type:k.type,message:k.message,filename:a.filename,index:k.index,line:typeof d=="number"?d+1:null,callLine:k.call&&n(k.call)+1,callExtract:f[n(k.call)],stack:k.stack,column:m,extract:[f[d-1],f[d],f[d+1]]}}return g.compress?j.replace(/(\s)+/g,"$1"):j}}(h.eval);if(c=0&&b.charAt(v)!=="\n";v--)w++;u={name:"ParseError",message:"Syntax Error on line 
"+p,index:c,filename:a.filename,line:p,column:w,extract:[q[p-2],q[p-1],q[p]]}}this.imports.queue.length>0?n=function(){g(u,h)}:g(u,h)},parsers:{primary:function(){var a,b=[];while((a=s(this.mixin.definition)||s(this.rule)||s(this.ruleset)||s(this.mixin.call)||s(this.comment)||s(this.directive))||s(/^[\s\n]+/))a&&b.push(a);return b},comment:function(){var a;if(b.charAt(c)==="/"){if(b.charAt(c+1)==="/")return new e.Comment(s(/^\/\/.*/),!0);if(a=s(/^\/\*(?:[^*]|\*+[^\/*])*\*+\/\n?/))return new e.Comment(a)}},entities:{quoted:function(){var a,d=c,f;b.charAt(d)==="~"&&(d++,f=!0);if(b.charAt(d)==='"'||b.charAt(d)==="'"){f&&s("~");if(a=s(/^"((?:[^"\\\r\n]|\\.)*)"|'((?:[^'\\\r\n]|\\.)*)'/))return new e.Quoted(a[0],a[1]||a[2],f)}},keyword:function(){var a;if(a=s(/^[A-Za-z-]+/))return new e.Keyword(a)},call:function(){var a,b,d=c;if(!!(a=/^([\w-]+|%)\(/.exec(j[f]))){a=a[1].toLowerCase();if(a==="url")return null;c+=a.length;if(a==="alpha")return s(this.alpha);s("("),b=s(this.entities.arguments);if(!s(")"))return;if(a)return new e.Call(a,b,d)}},arguments:function(){var a=[],b;while(b=s(this.expression)){a.push(b);if(!s(","))break}return a},literal:function(){return s(this.entities.dimension)||s(this.entities.color)||s(this.entities.quoted)},url:function(){var a;if(b.charAt(c)==="u"&&!!s(/^url\(/)){a=s(this.entities.quoted)||s(this.entities.variable)||s(this.entities.dataURI)||s(/^[-\w%@$\/.&=:;#+?~]+/)||"";if(!s(")"))throw new Error("missing closing ) for url()");return new e.URL(a.value||a.data||a instanceof e.Variable?a:new e.Anonymous(a),o.paths)}},dataURI:function(){var a;if(s(/^data:/)){a={},a.mime=s(/^[^\/]+\/[^,;)]+/)||"",a.charset=s(/^;\s*charset=[^,;)]+/)||"",a.base64=s(/^;\s*base64/)||"",a.data=s(/^,\s*[^)]+/);if(a.data)return a}},variable:function(){var a,d=c;if(b.charAt(c)==="@"&&(a=s(/^@@?[\w-]+/)))return new e.Variable(a,d)},color:function(){var a;if(b.charAt(c)==="#"&&(a=s(/^#([a-fA-F0-9]{6}|[a-fA-F0-9]{3})/)))return new e.Color(a[1])},dimension:function(){var 
a,d=b.charCodeAt(c);if(!(d>57||d<45||d===47))if(a=s(/^(-?\d*\.?\d+)(px|%|em|pc|ex|in|deg|s|ms|pt|cm|mm|rad|grad|turn)?/))return new e.Dimension(a[1],a[2])},javascript:function(){var a,d=c,f;b.charAt(d)==="~"&&(d++,f=!0);if(b.charAt(d)==="`"){f&&s("~");if(a=s(/^`([^`]*)`/))return new e.JavaScript(a[1],c,f)}}},variable:function(){var a;if(b.charAt(c)==="@"&&(a=s(/^(@[\w-]+)\s*:/)))return a[1]},shorthand:function(){var a,b;if(!!t(/^[@\w.%-]+\/[@\w.-]+/)&&(a=s(this.entity))&&s("/")&&(b=s(this.entity)))return new e.Shorthand(a,b)},mixin:{call:function(){var a=[],d,f,g,h=c,i=b.charAt(c);if(i==="."||i==="#"){while(d=s(/^[#.](?:[\w-]|\\(?:[a-fA-F0-9]{1,6} ?|[^a-fA-F0-9]))+/))a.push(new e.Element(f,d)),f=s(">");s("(")&&(g=s(this.entities.arguments))&&s(")");if(a.length>0&&(s(";")||t("}")))return new e.mixin.Call(a,g,h)}},definition:function(){var a,d=[],f,g,h,i;if(!(b.charAt(c)!=="."&&b.charAt(c)!=="#"||t(/^[^{]*(;|})/)))if(f=s(/^([#.](?:[\w-]|\\(?:[a-fA-F0-9]{1,6} ?|[^a-fA-F0-9]))+)\s*\(/)){a=f[1];while(h=s(this.entities.variable)||s(this.entities.literal)||s(this.entities.keyword)){if(h instanceof e.Variable)if(s(":"))if(i=s(this.expression))d.push({name:h.name,value:i});else throw new Error("Expected value");else d.push({name:h.name});else d.push({value:h});if(!s(","))break}if(!s(")"))throw new Error("Expected )");g=s(this.block);if(g)return new e.mixin.Definition(a,d,g)}}},entity:function(){return s(this.entities.literal)||s(this.entities.variable)||s(this.entities.url)||s(this.entities.call)||s(this.entities.keyword)||s(this.entities.javascript)||s(this.comment)},end:function(){return s(";")||t("}")},alpha:function(){var a;if(!!s(/^\(opacity=/i))if(a=s(/^\d+/)||s(this.entities.variable)){if(!s(")"))throw new Error("missing closing ) for alpha()");return new e.Alpha(a)}},element:function(){var a,b,c;c=s(this.combinator),a=s(/^(?:[.#]?|:*)(?:[\w-]|\\(?:[a-fA-F0-9]{1,6} ?|[^a-fA-F0-9]))+/)||s("*")||s(this.attribute)||s(/^\([^)@]+\)/)||s(/^(?:\d*\.)?\d+%/);if(a)return new 
e.Element(c,a);if(c.value&&c.value[0]==="&")return new e.Element(c,null)},combinator:function(){var a,d=b.charAt(c);if(d===">"||d==="+"||d==="~"){c++;while(b.charAt(c)===" ")c++;return new e.Combinator(d)}if(d==="&"){a="&",c++,b.charAt(c)===" "&&(a="& ");while(b.charAt(c)===" ")c++;return new e.Combinator(a)}if(d===":"&&b.charAt(c+1)===":"){c+=2;while(b.charAt(c)===" ")c++;return new e.Combinator("::")}return b.charAt(c-1)===" "?new e.Combinator(" "):new e.Combinator(null)},selector:function(){var a,d,f=[],g,h;while(d=s(this.element)){g=b.charAt(c),f.push(d);if(g==="{"||g==="}"||g===";"||g===",")break}if(f.length>0)return new e.Selector(f)},tag:function(){return s(/^[a-zA-Z][a-zA-Z-]*[0-9]?/)||s("*")},attribute:function(){var a="",b,c,d;if(!!s("[")){if(b=s(/^[a-zA-Z-]+/)||s(this.entities.quoted))(d=s(/^[|~*$^]?=/))&&(c=s(this.entities.quoted)||s(/^[\w-]+/))?a=[b,d,c.toCSS?c.toCSS():c].join(""):a=b;if(!s("]"))return;if(a)return"["+a+"]"}},block:function(){var a;if(s("{")&&(a=s(this.primary))&&s("}"))return a},ruleset:function(){var a=[],b,d,g;p();if(g=/^([.#:% \w-]+)[\s\n]*\{/.exec(j[f]))c+=g[0].length-1,a=[new e.Selector([new e.Element(null,g[1])])];else while(b=s(this.selector)){a.push(b),s(this.comment);if(!s(","))break;s(this.comment)}if(a.length>0&&(d=s(this.block)))return new e.Ruleset(a,d);i=c,q()},rule:function(){var a,d,g=b.charAt(c),k,l;p();if(g!=="."&&g!=="#"&&g!=="&")if(a=s(this.variable)||s(this.property)){a.charAt(0)!="@"&&(l=/^([^@+\/'"*`(;{}-]*);/.exec(j[f]))?(c+=l[0].length-1,d=new e.Anonymous(l[1])):a==="font"?d=s(this.font):d=s(this.value),k=s(this.important);if(d&&s(this.end))return new e.Rule(a,d,k,h);i=c,q()}},"import":function(){var a;if(s(/^@import\s+/)&&(a=s(this.entities.quoted)||s(this.entities.url))&&s(";"))return new e.Import(a,o)},directive:function(){var a,d,f,g;if(b.charAt(c)==="@"){if(d=s(this["import"]))return d;if(a=s(/^@media|@page/)||s(/^@(?:-webkit-)?keyframes/)){g=(s(/^[^{]+/)||"").trim();if(f=s(this.block))return new 
e.Directive(a+" "+g,f)}else if(a=s(/^@[-a-z]+/))if(a==="@font-face"){if(f=s(this.block))return new e.Directive(a,f)}else if((d=s(this.entity))&&s(";"))return new e.Directive(a,d)}},font:function(){var a=[],b=[],c,d,f,g;while(g=s(this.shorthand)||s(this.entity))b.push(g);a.push(new e.Expression(b));if(s(","))while(g=s(this.expression)){a.push(g);if(!s(","))break}return new e.Value(a)},value:function(){var a,b=[],c;while(a=s(this.expression)){b.push(a);if(!s(","))break}if(b.length>0)return new e.Value(b)},important:function(){if(b.charAt(c)==="!")return s(/^! *important/)},sub:function(){var a;if(s("(")&&(a=s(this.expression))&&s(")"))return a},multiplication:function(){var a,b,c,d;if(a=s(this.operand)){while((c=s("/")||s("*"))&&(b=s(this.operand)))d=new e.Operation(c,[d||a,b]);return d||a}},addition:function(){var a,d,f,g;if(a=s(this.multiplication)){while((f=s(/^[-+]\s+/)||b.charAt(c-1)!=" "&&(s("+")||s("-")))&&(d=s(this.multiplication)))g=new e.Operation(f,[g||a,d]);return g||a}},operand:function(){var a,d=b.charAt(c+1);b.charAt(c)==="-"&&(d==="@"||d==="(")&&(a=s("-"));var f=s(this.sub)||s(this.entities.dimension)||s(this.entities.color)||s(this.entities.variable)||s(this.entities.call);return a?new e.Operation("*",[new e.Dimension(-1),f]):f},expression:function(){var a,b,c=[],d;while(a=s(this.addition)||s(this.entity))c.push(a);if(c.length>0)return new e.Expression(c)},property:function(){var a;if(a=s(/^(\*?-?[-a-z_0-9]+)\s*:/))return a[1]}}}},typeof a!="undefined"&&(d.Parser.importer=function(a,b,c,d){a.charAt(0)!=="/"&&b.length>0&&(a=b[0]+a),n({href:a,title:a,type:d.mime},c,!0)}),function(a){function d(a){return Math.min(1,Math.max(0,a))}function c(b){if(b instanceof a.Dimension)return parseFloat(b.unit=="%"?b.value/100:b.value);if(typeof b=="number")return b;throw{error:"RuntimeError",message:"color functions take numbers as parameters"}}function b(b){return a.functions.hsla(b.h,b.s,b.l,b.a)}a.functions={rgb:function(a,b,c){return 
this.rgba(a,b,c,1)},rgba:function(b,d,e,f){var g=[b,d,e].map(function(a){return c(a)}),f=c(f);return new a.Color(g,f)},hsl:function(a,b,c){return this.hsla(a,b,c,1)},hsla:function(a,b,d,e){function h(a){a=a<0?a+1:a>1?a-1:a;return a*6<1?g+(f-g)*a*6:a*2<1?f:a*3<2?g+(f-g)*(2/3-a)*6:g}a=c(a)%360/360,b=c(b),d=c(d),e=c(e);var f=d<=.5?d*(b+1):d+b-d*b,g=d*2-f;return this.rgba(h(a+1/3)*255,h(a)*255,h(a-1/3)*255,e)},hue:function(b){return new a.Dimension(Math.round(b.toHSL().h))},saturation:function(b){return new a.Dimension(Math.round(b.toHSL().s*100),"%")},lightness:function(b){return new a.Dimension(Math.round(b.toHSL().l*100),"%")},alpha:function(b){return new a.Dimension(b.toHSL().a)},saturate:function(a,c){var e=a.toHSL();e.s+=c.value/100,e.s=d(e.s);return b(e)},desaturate:function(a,c){var e=a.toHSL();e.s-=c.value/100,e.s=d(e.s);return b(e)},lighten:function(a,c){var e=a.toHSL();e.l+=c.value/100,e.l=d(e.l);return b(e)},darken:function(a,c){var e=a.toHSL();e.l-=c.value/100,e.l=d(e.l);return b(e)},fadein:function(a,c){var e=a.toHSL();e.a+=c.value/100,e.a=d(e.a);return b(e)},fadeout:function(a,c){var e=a.toHSL();e.a-=c.value/100,e.a=d(e.a);return b(e)},spin:function(a,c){var d=a.toHSL(),e=(d.h+c.value)%360;d.h=e<0?360+e:e;return b(d)},mix:function(b,c,d){var e=d.value/100,f=e*2-1,g=b.toHSL().a-c.toHSL().a,h=((f*g==-1?f:(f+g)/(1+f*g))+1)/2,i=1-h,j=[b.rgb[0]*h+c.rgb[0]*i,b.rgb[1]*h+c.rgb[1]*i,b.rgb[2]*h+c.rgb[2]*i],k=b.alpha*e+c.alpha*(1-e);return new a.Color(j,k)},greyscale:function(b){return this.desaturate(b,new a.Dimension(100))},e:function(b){return new a.Anonymous(b instanceof a.JavaScript?b.evaluated:b)},escape:function(b){return new a.Anonymous(encodeURI(b.value).replace(/=/g,"%3D").replace(/:/g,"%3A").replace(/#/g,"%23").replace(/;/g,"%3B").replace(/\(/g,"%28").replace(/\)/g,"%29"))},"%":function(b){var c=Array.prototype.slice.call(arguments,1),d=b.value;for(var e=0;e255?255:a<0?0:a).toString(16);return a.length===1?"0"+a:a}).join("")},operate:function(b,c){var 
d=[];c instanceof a.Color||(c=c.toColor());for(var e=0;e<3;e++)d[e]=a.operate(b,this.rgb[e],c.rgb[e]);return new a.Color(d,this.alpha+c.alpha)},toHSL:function(){var a=this.rgb[0]/255,b=this.rgb[1]/255,c=this.rgb[2]/255,d=this.alpha,e=Math.max(a,b,c),f=Math.min(a,b,c),g,h,i=(e+f)/2,j=e-f;if(e===f)g=h=0;else{h=i>.5?j/(2-e-f):j/(e+f);switch(e){case a:g=(b-c)/j+(b":a.compress?">":" > "}[this.value]}}(c("less/tree")),function(a){a.Expression=function(a){this.value=a},a.Expression.prototype={eval:function(b){return this.value.length>1?new a.Expression(this.value.map(function(a){return a.eval(b)})):this.value.length===1?this.value[0].eval(b):this},toCSS:function(a){return this.value.map(function(b){return b.toCSS(a)}).join(" ")}}}(c("less/tree")),function(a){a.Import=function(b,c){var d=this;this._path=b,b instanceof a.Quoted?this.path=/\.(le?|c)ss$/.test(b.value)?b.value:b.value+".less":this.path=b.value.value||b.value,this.css=/css$/.test(this.path),this.css||c.push(this.path,function(a){if(!a)throw new Error("Error parsing "+d.path);d.root=a})},a.Import.prototype={toCSS:function(){return this.css?"@import "+this._path.toCSS()+";\n":""},eval:function(b){var c;if(this.css)return this;c=new a.Ruleset(null,this.root.rules.slice(0));for(var d=0;d0){c=this.arguments&&this.arguments.map(function(b){return b.eval(a)});for(var g=0;g0&&c>this.params.length)return!1;d=Math.min(c,this.arity);for(var e=0;e1?Array.prototype.push.apply(d,e.find(new a.Selector(b.elements.slice(1)),c)):d.push(e);break}});return this._lookups[g]=d},toCSS:function(b,c){var d=[],e=[],f=[],g=[],h,i;this.root||(b.length===0?g=this.selectors.map(function(a){return[a]}):this.joinSelectors(g,b,this.selectors));for(var j=0;j0&&(h=g.map(function(a){return a.map(function(a){return a.toCSS(c)}).join("").trim()}).join(c.compress?",":g.length>3?",\n":", "),d.push(h,(c.compress?"{":" {\n ")+e.join(c.compress?"":"\n ")+(c.compress?"}":"\n}\n"))),d.push(f);return 
d.join("")+(c.compress?"\n":"")},joinSelectors:function(a,b,c){for(var d=0;d0&&e.push(new a.Selector(g)),h.length>0&&f.push(new a.Selector(h));for(var l=0;l0&&typeof a!="undefined"&&(b.value=c[0]+(b.value.charAt(0)==="/"?b.value.slice(1):b.value)),this.value=b,this.paths=c)},b.URL.prototype={toCSS:function(){return"url("+(this.attrs?"data:"+this.attrs +.mime+this.attrs.charset+this.attrs.base64+this.attrs.data:this.value.toCSS())+")"},eval:function(a){return this.attrs?this:new b.URL(this.value.eval(a),this.paths)}}}(c("less/tree")),function(a){a.Value=function(a){this.value=a,this.is="value"},a.Value.prototype={eval:function(b){return this.value.length===1?this.value[0].eval(b):new a.Value(this.value.map(function(a){return a.eval(b)}))},toCSS:function(a){return this.value.map(function(b){return b.toCSS(a)}).join(a.compress?",":", ")}}}(c("less/tree")),function(a){a.Variable=function(a,b){this.name=a,this.index=b},a.Variable.prototype={eval:function(b){var c,d,e=this.name;e.indexOf("@@")==0&&(e="@"+(new a.Variable(e.slice(1))).eval(b).value);if(c=a.find(b.frames,function(a){if(d=a.variable(e))return d.value.eval(b)}))return c;throw{message:"variable "+e+" is undefined",index:this.index}}}}(c("less/tree")),c("less/tree").find=function(a,b){for(var c=0,d;c1?"["+a.value.map(function(a){return a.toCSS(!1)}).join(", ")+"]":a.toCSS(!1)};var f=location.protocol==="file:"||location.protocol==="chrome:"||location.protocol==="chrome-extension:"||location.protocol==="resource:";d.env=d.env||(location.hostname=="127.0.0.1"||location.hostname=="0.0.0.0"||location.hostname=="localhost"||location.port.length>0||f?"development":"production"),d.async=!1,d.poll=d.poll||(f?1e3:1500),d.watch=function(){return this.watchMode=!0},d.unwatch=function(){return this.watchMode=!1},d.env==="development"?(d.optimization=0,/!watch/.test(location.hash)&&d.watch(),d.watchTimer=setInterval(function(){d.watchMode&&m(function(a,b,c){a&&p(a.toCSS(),b,c.lastModified)})},d.poll)):d.optimization=3;var 
g;try{g=typeof a.localStorage=="undefined"?null:a.localStorage}catch(h){g=null}var i=document.getElementsByTagName("link"),j=/^text\/(x-)?less$/;d.sheets=[];for(var k=0;k>> 0; + for (var i = 0; i < len; i++) { + if (i in this) { + block.call(thisObject, this[i], i, this); + } + } + }; +} +if (!Array.prototype.map) { + Array.prototype.map = function(fun /*, thisp*/) { + var len = this.length >>> 0; + var res = new Array(len); + var thisp = arguments[1]; + + for (var i = 0; i < len; i++) { + if (i in this) { + res[i] = fun.call(thisp, this[i], i, this); + } + } + return res; + }; +} +if (!Array.prototype.filter) { + Array.prototype.filter = function (block /*, thisp */) { + var values = []; + var thisp = arguments[1]; + for (var i = 0; i < this.length; i++) { + if (block.call(thisp, this[i])) { + values.push(this[i]); + } + } + return values; + }; +} +if (!Array.prototype.reduce) { + Array.prototype.reduce = function(fun /*, initial*/) { + var len = this.length >>> 0; + var i = 0; + + // no value to return if no initial value and an empty array + if (len === 0 && arguments.length === 1) throw new TypeError(); + + if (arguments.length >= 2) { + var rv = arguments[1]; + } else { + do { + if (i in this) { + rv = this[i++]; + break; + } + // if array contains no values, no initial value to return + if (++i >= len) throw new TypeError(); + } while (true); + } + for (; i < len; i++) { + if (i in this) { + rv = fun.call(null, rv, this[i], i, this); + } + } + return rv; + }; +} +if (!Array.prototype.indexOf) { + Array.prototype.indexOf = function (value /*, fromIndex */ ) { + var length = this.length; + var i = arguments[1] || 0; + + if (!length) return -1; + if (i >= length) return -1; + if (i < 0) i += length; + + for (; i < length; i++) { + if (!Object.prototype.hasOwnProperty.call(this, i)) { continue } + if (value === this[i]) return i; + } + return -1; + }; +} + +// +// Object +// +if (!Object.keys) { + Object.keys = function (object) { + var keys = []; + for (var name 
in object) { + if (Object.prototype.hasOwnProperty.call(object, name)) { + keys.push(name); + } + } + return keys; + }; +} + +// +// String +// +if (!String.prototype.trim) { + String.prototype.trim = function () { + return String(this).replace(/^\s\s*/, '').replace(/\s\s*$/, ''); + }; +} +var less, tree; + +if (typeof(window) === 'undefined') { + if (typeof(exports) === 'undefined') { + // Rhino + less = {}; + tree = less.tree = {}; + } else { + // Node.js + less = exports, + tree = require('less/tree'); + } +} else { + // Browser + if (typeof(window.less) === 'undefined') { window.less = {} } + less = window.less, + tree = window.less.tree = {}; +} +// +// less.js - parser +// +// A relatively straight-forward predictive parser. +// There is no tokenization/lexing stage, the input is parsed +// in one sweep. +// +// To make the parser fast enough to run in the browser, several +// optimization had to be made: +// +// - Matching and slicing on a huge input is often cause of slowdowns. +// The solution is to chunkify the input into smaller strings. +// The chunks are stored in the `chunks` var, +// `j` holds the current chunk index, and `current` holds +// the index of the current chunk in relation to `input`. +// This gives us an almost 4x speed-up. +// +// - In many cases, we don't need to match individual tokens; +// for example, if a value doesn't hold any variables, operations +// or dynamic references, the parser can effectively 'skip' it, +// treating it as a literal. +// An example would be '1px solid #000' - which evaluates to itself, +// we don't need to know what the individual components are. +// The drawback, of course is that you don't get the benefits of +// syntax-checking on the CSS. This gives us a 50% speed-up in the parser, +// and a smaller speed-up in the code-gen. +// +// +// Token matching is done with the `$` function, which either takes +// a terminal string or regexp, or a non-terminal function to call. 
+// It also takes care of moving all the indices forwards. +// +// +less.Parser = function Parser(env) { + var input, // LeSS input string + i, // current index in `input` + j, // current chunk + temp, // temporarily holds a chunk's state, for backtracking + memo, // temporarily holds `i`, when backtracking + furthest, // furthest index the parser has gone to + chunks, // chunkified input + current, // index of current chunk, in `input` + parser; + + var that = this; + + // This function is called after all files + // have been imported through `@import`. + var finish = function () {}; + + var imports = this.imports = { + paths: env && env.paths || [], // Search paths, when importing + queue: [], // Files which haven't been imported yet + files: {}, // Holds the imported parse trees + mime: env && env.mime, // MIME type of .less files + push: function (path, callback) { + var that = this; + this.queue.push(path); + + // + // Import a file asynchronously + // + less.Parser.importer(path, this.paths, function (root) { + that.queue.splice(that.queue.indexOf(path), 1); // Remove the path from the queue + that.files[path] = root; // Store the root + + callback(root); + + if (that.queue.length === 0) { finish() } // Call `finish` if we're done importing + }, env); + } + }; + + function save() { temp = chunks[j], memo = i, current = i } + function restore() { chunks[j] = temp, i = memo, current = i } + + function sync() { + if (i > current) { + chunks[j] = chunks[j].slice(i - current); + current = i; + } + } + // + // Parse from a token, regexp or string, and move forward if match + // + function $(tok) { + var match, args, length, c, index, endIndex, k, mem; + + // + // Non-terminal + // + if (tok instanceof Function) { + return tok.call(parser.parsers); + // + // Terminal + // + // Either match a single character in the input, + // or match a regexp in the current chunk (chunk[j]). + // + } else if (typeof(tok) === 'string') { + match = input.charAt(i) === tok ? 
tok : null; + length = 1; + sync (); + } else { + sync (); + + if (match = tok.exec(chunks[j])) { + length = match[0].length; + } else { + return null; + } + } + + // The match is confirmed, add the match length to `i`, + // and consume any extra white-space characters (' ' || '\n') + // which come after that. The reason for this is that LeSS's + // grammar is mostly white-space insensitive. + // + if (match) { + mem = i += length; + endIndex = i + chunks[j].length - length; + + while (i < endIndex) { + c = input.charCodeAt(i); + if (! (c === 32 || c === 10 || c === 9)) { break } + i++; + } + chunks[j] = chunks[j].slice(length + (i - mem)); + current = i; + + if (chunks[j].length === 0 && j < chunks.length - 1) { j++ } + + if(typeof(match) === 'string') { + return match; + } else { + return match.length === 1 ? match[0] : match; + } + } + } + + // Same as $(), but don't change the state of the parser, + // just return the match. + function peek(tok) { + if (typeof(tok) === 'string') { + return input.charAt(i) === tok; + } else { + if (tok.test(chunks[j])) { + return true; + } else { + return false; + } + } + } + + this.env = env = env || {}; + + // The optimization level dictates the thoroughness of the parser, + // the lower the number, the less nodes it will create in the tree. + // This could matter for debugging, or if you want to access + // the individual nodes in the tree. + this.optimization = ('optimization' in this.env) ? this.env.optimization : 1; + + this.env.filename = this.env.filename || null; + + // + // The Parser + // + return parser = { + + imports: imports, + // + // Parse an input string into an abstract syntax tree, + // call `callback` when done. + // + parse: function (str, callback) { + var root, start, end, zone, line, lines, buff = [], c, error = null; + + i = j = current = furthest = 0; + chunks = []; + input = str.replace(/\r\n/g, '\n'); + + // Split the input into chunks. 
+ chunks = (function (chunks) { + var j = 0, + skip = /[^"'`\{\}\/\(\)]+/g, + comment = /\/\*(?:[^*]|\*+[^\/*])*\*+\/|\/\/.*/g, + level = 0, + match, + chunk = chunks[0], + inParam, + inString; + + for (var i = 0, c, cc; i < input.length; i++) { + skip.lastIndex = i; + if (match = skip.exec(input)) { + if (match.index === i) { + i += match[0].length; + chunk.push(match[0]); + } + } + c = input.charAt(i); + comment.lastIndex = i; + + if (!inString && !inParam && c === '/') { + cc = input.charAt(i + 1); + if (cc === '/' || cc === '*') { + if (match = comment.exec(input)) { + if (match.index === i) { + i += match[0].length; + chunk.push(match[0]); + c = input.charAt(i); + } + } + } + } + + if (c === '{' && !inString && !inParam) { level ++; + chunk.push(c); + } else if (c === '}' && !inString && !inParam) { level --; + chunk.push(c); + chunks[++j] = chunk = []; + } else if (c === '(' && !inString && !inParam) { + chunk.push(c); + inParam = true; + } else if (c === ')' && !inString && inParam) { + chunk.push(c); + inParam = false; + } else { + if (c === '"' || c === "'" || c === '`') { + if (! inString) { + inString = c; + } else { + inString = inString === c ? false : inString; + } + } + chunk.push(c); + } + } + if (level > 0) { + throw { + type: 'Syntax', + message: "Missing closing `}`", + filename: env.filename + }; + } + + return chunks.map(function (c) { return c.join('') });; + })([[]]); + + // Start with the primary rule. + // The whole syntax tree is held under a Ruleset node, + // with the `root` property set to true, so no `{}` are + // output. The callback is called when the input is parsed. 
+ root = new(tree.Ruleset)([], $(this.parsers.primary)); + root.root = true; + + root.toCSS = (function (evaluate) { + var line, lines, column; + + return function (options, variables) { + var frames = []; + + options = options || {}; + // + // Allows setting variables with a hash, so: + // + // `{ color: new(tree.Color)('#f01') }` will become: + // + // new(tree.Rule)('@color', + // new(tree.Value)([ + // new(tree.Expression)([ + // new(tree.Color)('#f01') + // ]) + // ]) + // ) + // + if (typeof(variables) === 'object' && !Array.isArray(variables)) { + variables = Object.keys(variables).map(function (k) { + var value = variables[k]; + + if (! (value instanceof tree.Value)) { + if (! (value instanceof tree.Expression)) { + value = new(tree.Expression)([value]); + } + value = new(tree.Value)([value]); + } + return new(tree.Rule)('@' + k, value, false, 0); + }); + frames = [new(tree.Ruleset)(null, variables)]; + } + + try { + var css = evaluate.call(this, { frames: frames }) + .toCSS([], { compress: options.compress || false }); + } catch (e) { + lines = input.split('\n'); + line = getLine(e.index); + + for (var n = e.index, column = -1; + n >= 0 && input.charAt(n) !== '\n'; + n--) { column++ } + + throw { + type: e.type, + message: e.message, + filename: env.filename, + index: e.index, + line: typeof(line) === 'number' ? line + 1 : null, + callLine: e.call && (getLine(e.call) + 1), + callExtract: lines[getLine(e.call)], + stack: e.stack, + column: column, + extract: [ + lines[line - 1], + lines[line], + lines[line + 1] + ] + }; + } + if (options.compress) { + return css.replace(/(\s)+/g, "$1"); + } else { + return css; + } + + function getLine(index) { + return index ? (input.slice(0, index).match(/\n/g) || "").length : null; + } + }; + })(root.eval); + + // If `i` is smaller than the `input.length - 1`, + // it means the parser wasn't able to parse the whole + // string, so we've got a parsing error. 
+ // + // We try to extract a \n delimited string, + // showing the line where the parse error occured. + // We split it up into two parts (the part which parsed, + // and the part which didn't), so we can color them differently. + if (i < input.length - 1) { + i = furthest; + lines = input.split('\n'); + line = (input.slice(0, i).match(/\n/g) || "").length + 1; + + for (var n = i, column = -1; n >= 0 && input.charAt(n) !== '\n'; n--) { column++ } + + error = { + name: "ParseError", + message: "Syntax Error on line " + line, + index: i, + filename: env.filename, + line: line, + column: column, + extract: [ + lines[line - 2], + lines[line - 1], + lines[line] + ] + }; + } + + if (this.imports.queue.length > 0) { + finish = function () { callback(error, root) }; + } else { + callback(error, root); + } + }, + + // + // Here in, the parsing rules/functions + // + // The basic structure of the syntax tree generated is as follows: + // + // Ruleset -> Rule -> Value -> Expression -> Entity + // + // Here's some LESS code: + // + // .class { + // color: #fff; + // border: 1px solid #000; + // width: @w + 4px; + // > .child {...} + // } + // + // And here's what the parse tree might look like: + // + // Ruleset (Selector '.class', [ + // Rule ("color", Value ([Expression [Color #fff]])) + // Rule ("border", Value ([Expression [Dimension 1px][Keyword "solid"][Color #000]])) + // Rule ("width", Value ([Expression [Operation "+" [Variable "@w"][Dimension 4px]]])) + // Ruleset (Selector [Element '>', '.child'], [...]) + // ]) + // + // In general, most rules will try to parse a token with the `$()` function, and if the return + // value is truly, will return a new node, of the relevant type. Sometimes, we need to check + // first, before parsing, that's when we use `peek()`. + // + parsers: { + // + // The `primary` rule is the *entry* and *exit* point of the parser. + // The rules here can appear at any level of the parse tree. 
+ // + // The recursive nature of the grammar is an interplay between the `block` + // rule, which represents `{ ... }`, the `ruleset` rule, and this `primary` rule, + // as represented by this simplified grammar: + // + // primary → (ruleset | rule)+ + // ruleset → selector+ block + // block → '{' primary '}' + // + // Only at one point is the primary rule not called from the + // block rule: at the root level. + // + primary: function () { + var node, root = []; + + while ((node = $(this.mixin.definition) || $(this.rule) || $(this.ruleset) || + $(this.mixin.call) || $(this.comment) || $(this.directive)) + || $(/^[\s\n]+/)) { + node && root.push(node); + } + return root; + }, + + // We create a Comment node for CSS comments `/* */`, + // but keep the LeSS comments `//` silent, by just skipping + // over them. + comment: function () { + var comment; + + if (input.charAt(i) !== '/') return; + + if (input.charAt(i + 1) === '/') { + return new(tree.Comment)($(/^\/\/.*/), true); + } else if (comment = $(/^\/\*(?:[^*]|\*+[^\/*])*\*+\/\n?/)) { + return new(tree.Comment)(comment); + } + }, + + // + // Entities are tokens which can be found inside an Expression + // + entities: { + // + // A string, which supports escaping " and ' + // + // "milky way" 'he\'s the one!' + // + quoted: function () { + var str, j = i, e; + + if (input.charAt(j) === '~') { j++, e = true } // Escaped strings + if (input.charAt(j) !== '"' && input.charAt(j) !== "'") return; + + e && $('~'); + + if (str = $(/^"((?:[^"\\\r\n]|\\.)*)"|'((?:[^'\\\r\n]|\\.)*)'/)) { + return new(tree.Quoted)(str[0], str[1] || str[2], e); + } + }, + + // + // A catch-all word, such as: + // + // black border-collapse + // + keyword: function () { + var k; + if (k = $(/^[A-Za-z-]+/)) { return new(tree.Keyword)(k) } + }, + + // + // A function call + // + // rgb(255, 0, 255) + // + // We also try to catch IE's `alpha()`, but let the `alpha` parser + // deal with the details. 
+ // + // The arguments are parsed with the `entities.arguments` parser. + // + call: function () { + var name, args, index = i; + + if (! (name = /^([\w-]+|%)\(/.exec(chunks[j]))) return; + + name = name[1].toLowerCase(); + + if (name === 'url') { return null } + else { i += name.length } + + if (name === 'alpha') { return $(this.alpha) } + + $('('); // Parse the '(' and consume whitespace. + + args = $(this.entities.arguments); + + if (! $(')')) return; + + if (name) { return new(tree.Call)(name, args, index) } + }, + arguments: function () { + var args = [], arg; + + while (arg = $(this.expression)) { + args.push(arg); + if (! $(',')) { break } + } + return args; + }, + literal: function () { + return $(this.entities.dimension) || + $(this.entities.color) || + $(this.entities.quoted); + }, + + // + // Parse url() tokens + // + // We use a specific rule for urls, because they don't really behave like + // standard function calls. The difference is that the argument doesn't have + // to be enclosed within a string, so it can't be parsed as an Expression. + // + url: function () { + var value; + + if (input.charAt(i) !== 'u' || !$(/^url\(/)) return; + value = $(this.entities.quoted) || $(this.entities.variable) || + $(this.entities.dataURI) || $(/^[-\w%@$\/.&=:;#+?~]+/) || ""; + if (! $(')')) throw new(Error)("missing closing ) for url()"); + + return new(tree.URL)((value.value || value.data || value instanceof tree.Variable) + ? value : new(tree.Anonymous)(value), imports.paths); + }, + + dataURI: function () { + var obj; + + if ($(/^data:/)) { + obj = {}; + obj.mime = $(/^[^\/]+\/[^,;)]+/) || ''; + obj.charset = $(/^;\s*charset=[^,;)]+/) || ''; + obj.base64 = $(/^;\s*base64/) || ''; + obj.data = $(/^,\s*[^)]+/); + + if (obj.data) { return obj } + } + }, + + // + // A Variable entity, such as `@fink`, in + // + // width: @fink + 2px + // + // We use a different parser for variable definitions, + // see `parsers.variable`. 
+ // + variable: function () { + var name, index = i; + + if (input.charAt(i) === '@' && (name = $(/^@@?[\w-]+/))) { + return new(tree.Variable)(name, index); + } + }, + + // + // A Hexadecimal color + // + // #4F3C2F + // + // `rgb` and `hsl` colors are parsed through the `entities.call` parser. + // + color: function () { + var rgb; + + if (input.charAt(i) === '#' && (rgb = $(/^#([a-fA-F0-9]{6}|[a-fA-F0-9]{3})/))) { + return new(tree.Color)(rgb[1]); + } + }, + + // + // A Dimension, that is, a number and a unit + // + // 0.5em 95% + // + dimension: function () { + var value, c = input.charCodeAt(i); + if ((c > 57 || c < 45) || c === 47) return; + + if (value = $(/^(-?\d*\.?\d+)(px|%|em|pc|ex|in|deg|s|ms|pt|cm|mm|rad|grad|turn)?/)) { + return new(tree.Dimension)(value[1], value[2]); + } + }, + + // + // JavaScript code to be evaluated + // + // `window.location.href` + // + javascript: function () { + var str, j = i, e; + + if (input.charAt(j) === '~') { j++, e = true } // Escaped strings + if (input.charAt(j) !== '`') { return } + + e && $('~'); + + if (str = $(/^`([^`]*)`/)) { + return new(tree.JavaScript)(str[1], i, e); + } + } + }, + + // + // The variable part of a variable definition. Used in the `rule` parser + // + // @fink: + // + variable: function () { + var name; + + if (input.charAt(i) === '@' && (name = $(/^(@[\w-]+)\s*:/))) { return name[1] } + }, + + // + // A font size/line-height shorthand + // + // small/12px + // + // We need to peek first, or we'll match on keywords and dimensions + // + shorthand: function () { + var a, b; + + if (! 
peek(/^[@\w.%-]+\/[@\w.-]+/)) return; + + if ((a = $(this.entity)) && $('/') && (b = $(this.entity))) { + return new(tree.Shorthand)(a, b); + } + }, + + // + // Mixins + // + mixin: { + // + // A Mixin call, with an optional argument list + // + // #mixins > .square(#fff); + // .rounded(4px, black); + // .button; + // + // The `while` loop is there because mixins can be + // namespaced, but we only support the child and descendant + // selector for now. + // + call: function () { + var elements = [], e, c, args, index = i, s = input.charAt(i); + + if (s !== '.' && s !== '#') { return } + + while (e = $(/^[#.](?:[\w-]|\\(?:[a-fA-F0-9]{1,6} ?|[^a-fA-F0-9]))+/)) { + elements.push(new(tree.Element)(c, e)); + c = $('>'); + } + $('(') && (args = $(this.entities.arguments)) && $(')'); + + if (elements.length > 0 && ($(';') || peek('}'))) { + return new(tree.mixin.Call)(elements, args, index); + } + }, + + // + // A Mixin definition, with a list of parameters + // + // .rounded (@radius: 2px, @color) { + // ... + // } + // + // Until we have a finer grained state-machine, we have to + // do a look-ahead, to make sure we don't have a mixin call. + // See the `rule` function for more information. + // + // We start by matching `.rounded (`, and then proceed on to + // the argument list, which has optional default values. + // We store the parameters in `params`, with a `value` key, + // if there is a value, such as in the case of `@radius`. + // + // Once we've got our params list, and a closing `)`, we parse + // the `{...}` block. + // + definition: function () { + var name, params = [], match, ruleset, param, value; + + if ((input.charAt(i) !== '.' 
&& input.charAt(i) !== '#') || + peek(/^[^{]*(;|})/)) return; + + if (match = $(/^([#.](?:[\w-]|\\(?:[a-fA-F0-9]{1,6} ?|[^a-fA-F0-9]))+)\s*\(/)) { + name = match[1]; + + while (param = $(this.entities.variable) || $(this.entities.literal) + || $(this.entities.keyword)) { + // Variable + if (param instanceof tree.Variable) { + if ($(':')) { + if (value = $(this.expression)) { + params.push({ name: param.name, value: value }); + } else { + throw new(Error)("Expected value"); + } + } else { + params.push({ name: param.name }); + } + } else { + params.push({ value: param }); + } + if (! $(',')) { break } + } + if (! $(')')) throw new(Error)("Expected )"); + + ruleset = $(this.block); + + if (ruleset) { + return new(tree.mixin.Definition)(name, params, ruleset); + } + } + } + }, + + // + // Entities are the smallest recognized token, + // and can be found inside a rule's value. + // + entity: function () { + return $(this.entities.literal) || $(this.entities.variable) || $(this.entities.url) || + $(this.entities.call) || $(this.entities.keyword) || $(this.entities.javascript) || + $(this.comment); + }, + + // + // A Rule terminator. Note that we use `peek()` to check for '}', + // because the `block` rule will be expecting it, but we still need to make sure + // it's there, if ';' was ommitted. + // + end: function () { + return $(';') || peek('}'); + }, + + // + // IE's alpha function + // + // alpha(opacity=88) + // + alpha: function () { + var value; + + if (! $(/^\(opacity=/i)) return; + if (value = $(/^\d+/) || $(this.entities.variable)) { + if (! $(')')) throw new(Error)("missing closing ) for alpha()"); + return new(tree.Alpha)(value); + } + }, + + // + // A Selector Element + // + // div + // + h1 + // #socks + // input[type="text"] + // + // Elements are the building blocks for Selectors, + // they are made out of a `Combinator` (see combinator rule), + // and an element name, such as a tag a class, or `*`. 
+ // + element: function () { + var e, t, c; + + c = $(this.combinator); + e = $(/^(?:[.#]?|:*)(?:[\w-]|\\(?:[a-fA-F0-9]{1,6} ?|[^a-fA-F0-9]))+/) || $('*') || $(this.attribute) || $(/^\([^)@]+\)/); + + if (e) { return new(tree.Element)(c, e) } + + if (c.value && c.value[0] === '&') { + return new(tree.Element)(c, null); + } + }, + + // + // Combinators combine elements together, in a Selector. + // + // Because our parser isn't white-space sensitive, special care + // has to be taken, when parsing the descendant combinator, ` `, + // as it's an empty space. We have to check the previous character + // in the input, to see if it's a ` ` character. More info on how + // we deal with this in *combinator.js*. + // + combinator: function () { + var match, c = input.charAt(i); + + if (c === '>' || c === '+' || c === '~') { + i++; + while (input.charAt(i) === ' ') { i++ } + return new(tree.Combinator)(c); + } else if (c === '&') { + match = '&'; + i++; + if(input.charAt(i) === ' ') { + match = '& '; + } + while (input.charAt(i) === ' ') { i++ } + return new(tree.Combinator)(match); + } else if (c === ':' && input.charAt(i + 1) === ':') { + i += 2; + while (input.charAt(i) === ' ') { i++ } + return new(tree.Combinator)('::'); + } else if (input.charAt(i - 1) === ' ') { + return new(tree.Combinator)(" "); + } else { + return new(tree.Combinator)(null); + } + }, + + // + // A CSS Selector + // + // .class > div + h1 + // li a:hover + // + // Selectors are made out of one or more Elements, see above. + // + selector: function () { + var sel, e, elements = [], c, match; + + while (e = $(this.element)) { + c = input.charAt(i); + elements.push(e) + if (c === '{' || c === '}' || c === ';' || c === ',') { break } + } + + if (elements.length > 0) { return new(tree.Selector)(elements) } + }, + tag: function () { + return $(/^[a-zA-Z][a-zA-Z-]*[0-9]?/) || $('*'); + }, + attribute: function () { + var attr = '', key, val, op; + + if (! 
$('[')) return; + + if (key = $(/^[a-zA-Z-]+/) || $(this.entities.quoted)) { + if ((op = $(/^[|~*$^]?=/)) && + (val = $(this.entities.quoted) || $(/^[\w-]+/))) { + attr = [key, op, val.toCSS ? val.toCSS() : val].join(''); + } else { attr = key } + } + + if (! $(']')) return; + + if (attr) { return "[" + attr + "]" } + }, + + // + // The `block` rule is used by `ruleset` and `mixin.definition`. + // It's a wrapper around the `primary` rule, with added `{}`. + // + block: function () { + var content; + + if ($('{') && (content = $(this.primary)) && $('}')) { + return content; + } + }, + + // + // div, .class, body > p {...} + // + ruleset: function () { + var selectors = [], s, rules, match; + save(); + + if (match = /^([.#: \w-]+)[\s\n]*\{/.exec(chunks[j])) { + i += match[0].length - 1; + selectors = [new(tree.Selector)([new(tree.Element)(null, match[1])])]; + } else { + while (s = $(this.selector)) { + selectors.push(s); + $(this.comment); + if (! $(',')) { break } + $(this.comment); + } + } + + if (selectors.length > 0 && (rules = $(this.block))) { + return new(tree.Ruleset)(selectors, rules); + } else { + // Backtrack + furthest = i; + restore(); + } + }, + rule: function () { + var name, value, c = input.charAt(i), important, match; + save(); + + if (c === '.' 
|| c === '#' || c === '&') { return } + + if (name = $(this.variable) || $(this.property)) { + if ((name.charAt(0) != '@') && (match = /^([^@+\/'"*`(;{}-]*);/.exec(chunks[j]))) { + i += match[0].length - 1; + value = new(tree.Anonymous)(match[1]); + } else if (name === "font") { + value = $(this.font); + } else { + value = $(this.value); + } + important = $(this.important); + + if (value && $(this.end)) { + return new(tree.Rule)(name, value, important, memo); + } else { + furthest = i; + restore(); + } + } + }, + + // + // An @import directive + // + // @import "lib"; + // + // Depending on our environemnt, importing is done differently: + // In the browser, it's an XHR request, in Node, it would be a + // file-system operation. The function used for importing is + // stored in `import`, which we pass to the Import constructor. + // + "import": function () { + var path; + if ($(/^@import\s+/) && + (path = $(this.entities.quoted) || $(this.entities.url)) && + $(';')) { + return new(tree.Import)(path, imports); + } + }, + + // + // A CSS Directive + // + // @charset "utf-8"; + // + directive: function () { + var name, value, rules, types; + + if (input.charAt(i) !== '@') return; + + if (value = $(this['import'])) { + return value; + } else if (name = $(/^@media|@page|@-[-a-z]+/)) { + types = ($(/^[^{]+/) || '').trim(); + if (rules = $(this.block)) { + return new(tree.Directive)(name + " " + types, rules); + } + } else if (name = $(/^@[-a-z]+/)) { + if (name === '@font-face') { + if (rules = $(this.block)) { + return new(tree.Directive)(name, rules); + } + } else if ((value = $(this.entity)) && $(';')) { + return new(tree.Directive)(name, value); + } + } + }, + font: function () { + var value = [], expression = [], weight, shorthand, font, e; + + while (e = $(this.shorthand) || $(this.entity)) { + expression.push(e); + } + value.push(new(tree.Expression)(expression)); + + if ($(',')) { + while (e = $(this.expression)) { + value.push(e); + if (! 
$(',')) { break } + } + } + return new(tree.Value)(value); + }, + + // + // A Value is a comma-delimited list of Expressions + // + // font-family: Baskerville, Georgia, serif; + // + // In a Rule, a Value represents everything after the `:`, + // and before the `;`. + // + value: function () { + var e, expressions = [], important; + + while (e = $(this.expression)) { + expressions.push(e); + if (! $(',')) { break } + } + + if (expressions.length > 0) { + return new(tree.Value)(expressions); + } + }, + important: function () { + if (input.charAt(i) === '!') { + return $(/^! *important/); + } + }, + sub: function () { + var e; + + if ($('(') && (e = $(this.expression)) && $(')')) { + return e; + } + }, + multiplication: function () { + var m, a, op, operation; + if (m = $(this.operand)) { + while ((op = ($('/') || $('*'))) && (a = $(this.operand))) { + operation = new(tree.Operation)(op, [operation || m, a]); + } + return operation || m; + } + }, + addition: function () { + var m, a, op, operation; + if (m = $(this.multiplication)) { + while ((op = $(/^[-+]\s+/) || (input.charAt(i - 1) != ' ' && ($('+') || $('-')))) && + (a = $(this.multiplication))) { + operation = new(tree.Operation)(op, [operation || m, a]); + } + return operation || m; + } + }, + + // + // An operand is anything that can be part of an operation, + // such as a Color, or a Variable + // + operand: function () { + var negate, p = input.charAt(i + 1); + + if (input.charAt(i) === '-' && (p === '@' || p === '(')) { negate = $('-') } + var o = $(this.sub) || $(this.entities.dimension) || + $(this.entities.color) || $(this.entities.variable) || + $(this.entities.call); + return negate ? new(tree.Operation)('*', [new(tree.Dimension)(-1), o]) + : o; + }, + + // + // Expressions either represent mathematical operations, + // or white-space delimited Entities. 
+ // + // 1px solid black + // @var * 2 + // + expression: function () { + var e, delim, entities = [], d; + + while (e = $(this.addition) || $(this.entity)) { + entities.push(e); + } + if (entities.length > 0) { + return new(tree.Expression)(entities); + } + }, + property: function () { + var name; + + if (name = $(/^(\*?-?[-a-z_0-9]+)\s*:/)) { + return name[1]; + } + } + } + }; +}; + +if (typeof(window) !== 'undefined' /* browser */ || typeof(exports) === 'undefined' /* rhino */) { + // + // Used by `@import` directives + // + less.Parser.importer = function (path, paths, callback, env) { + if (path.charAt(0) !== '/' && paths.length > 0) { + path = paths[0] + path; + } + // We pass `true` as 3rd argument, to force the reload of the import. + // This is so we can get the syntax tree as opposed to just the CSS output, + // as we need this to evaluate the current stylesheet. + loadStyleSheet({ href: path, title: path, type: env.mime }, callback, true); + }; +} + +(function (tree) { + +tree.functions = { + rgb: function (r, g, b) { + return this.rgba(r, g, b, 1.0); + }, + rgba: function (r, g, b, a) { + var rgb = [r, g, b].map(function (c) { return number(c) }), + a = number(a); + return new(tree.Color)(rgb, a); + }, + hsl: function (h, s, l) { + return this.hsla(h, s, l, 1.0); + }, + hsla: function (h, s, l, a) { + h = (number(h) % 360) / 360; + s = number(s); l = number(l); a = number(a); + + var m2 = l <= 0.5 ? l * (s + 1) : l + s - l * s; + var m1 = l * 2 - m2; + + return this.rgba(hue(h + 1/3) * 255, + hue(h) * 255, + hue(h - 1/3) * 255, + a); + + function hue(h) { + h = h < 0 ? h + 1 : (h > 1 ? 
h - 1 : h); + if (h * 6 < 1) return m1 + (m2 - m1) * h * 6; + else if (h * 2 < 1) return m2; + else if (h * 3 < 2) return m1 + (m2 - m1) * (2/3 - h) * 6; + else return m1; + } + }, + hue: function (color) { + return new(tree.Dimension)(Math.round(color.toHSL().h)); + }, + saturation: function (color) { + return new(tree.Dimension)(Math.round(color.toHSL().s * 100), '%'); + }, + lightness: function (color) { + return new(tree.Dimension)(Math.round(color.toHSL().l * 100), '%'); + }, + alpha: function (color) { + return new(tree.Dimension)(color.toHSL().a); + }, + saturate: function (color, amount) { + var hsl = color.toHSL(); + + hsl.s += amount.value / 100; + hsl.s = clamp(hsl.s); + return hsla(hsl); + }, + desaturate: function (color, amount) { + var hsl = color.toHSL(); + + hsl.s -= amount.value / 100; + hsl.s = clamp(hsl.s); + return hsla(hsl); + }, + lighten: function (color, amount) { + var hsl = color.toHSL(); + + hsl.l += amount.value / 100; + hsl.l = clamp(hsl.l); + return hsla(hsl); + }, + darken: function (color, amount) { + var hsl = color.toHSL(); + + hsl.l -= amount.value / 100; + hsl.l = clamp(hsl.l); + return hsla(hsl); + }, + fadein: function (color, amount) { + var hsl = color.toHSL(); + + hsl.a += amount.value / 100; + hsl.a = clamp(hsl.a); + return hsla(hsl); + }, + fadeout: function (color, amount) { + var hsl = color.toHSL(); + + hsl.a -= amount.value / 100; + hsl.a = clamp(hsl.a); + return hsla(hsl); + }, + spin: function (color, amount) { + var hsl = color.toHSL(); + var hue = (hsl.h + amount.value) % 360; + + hsl.h = hue < 0 ? 360 + hue : hue; + + return hsla(hsl); + }, + // + // Copyright (c) 2006-2009 Hampton Catlin, Nathan Weizenbaum, and Chris Eppstein + // http://sass-lang.com + // + mix: function (color1, color2, weight) { + var p = weight.value / 100.0; + var w = p * 2 - 1; + var a = color1.toHSL().a - color2.toHSL().a; + + var w1 = (((w * a == -1) ? 
w : (w + a) / (1 + w * a)) + 1) / 2.0; + var w2 = 1 - w1; + + var rgb = [color1.rgb[0] * w1 + color2.rgb[0] * w2, + color1.rgb[1] * w1 + color2.rgb[1] * w2, + color1.rgb[2] * w1 + color2.rgb[2] * w2]; + + var alpha = color1.alpha * p + color2.alpha * (1 - p); + + return new(tree.Color)(rgb, alpha); + }, + greyscale: function (color) { + return this.desaturate(color, new(tree.Dimension)(100)); + }, + e: function (str) { + return new(tree.Anonymous)(str instanceof tree.JavaScript ? str.evaluated : str); + }, + escape: function (str) { + return new(tree.Anonymous)(encodeURI(str.value).replace(/=/g, "%3D").replace(/:/g, "%3A").replace(/#/g, "%23").replace(/;/g, "%3B").replace(/\(/g, "%28").replace(/\)/g, "%29")); + }, + '%': function (quoted /* arg, arg, ...*/) { + var args = Array.prototype.slice.call(arguments, 1), + str = quoted.value; + + for (var i = 0; i < args.length; i++) { + str = str.replace(/%[sda]/i, function(token) { + var value = token.match(/s/i) ? args[i].value : args[i].toCSS(); + return token.match(/[A-Z]$/) ? encodeURIComponent(value) : value; + }); + } + str = str.replace(/%%/g, '%'); + return new(tree.Quoted)('"' + str + '"', str); + }, + round: function (n) { + if (n instanceof tree.Dimension) { + return new(tree.Dimension)(Math.round(number(n)), n.unit); + } else if (typeof(n) === 'number') { + return Math.round(n); + } else { + throw { + error: "RuntimeError", + message: "math functions take numbers as parameters" + }; + } + } +}; + +function hsla(hsla) { + return tree.functions.hsla(hsla.h, hsla.s, hsla.l, hsla.a); +} + +function number(n) { + if (n instanceof tree.Dimension) { + return parseFloat(n.unit == '%' ? 
n.value / 100 : n.value); + } else if (typeof(n) === 'number') { + return n; + } else { + throw { + error: "RuntimeError", + message: "color functions take numbers as parameters" + }; + } +} + +function clamp(val) { + return Math.min(1, Math.max(0, val)); +} + +})(require('less/tree')); +(function (tree) { + +tree.Alpha = function (val) { + this.value = val; +}; +tree.Alpha.prototype = { + toCSS: function () { + return "alpha(opacity=" + + (this.value.toCSS ? this.value.toCSS() : this.value) + ")"; + }, + eval: function (env) { + if (this.value.eval) { this.value = this.value.eval(env) } + return this; + } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Anonymous = function (string) { + this.value = string.value || string; +}; +tree.Anonymous.prototype = { + toCSS: function () { + return this.value; + }, + eval: function () { return this } +}; + +})(require('less/tree')); +(function (tree) { + +// +// A function call node. +// +tree.Call = function (name, args, index) { + this.name = name; + this.args = args; + this.index = index; +}; +tree.Call.prototype = { + // + // When evaluating a function call, + // we either find the function in `tree.functions` [1], + // in which case we call it, passing the evaluated arguments, + // or we simply print it out as it appeared originally [2]. + // + // The *functions.js* file contains the built-in functions. + // + // The reason why we evaluate the arguments, is in the case where + // we try to pass a variable to a function, like: `saturate(@color)`. + // The function should receive the value, not the variable. + // + eval: function (env) { + var args = this.args.map(function (a) { return a.eval(env) }); + + if (this.name in tree.functions) { // 1. + try { + return tree.functions[this.name].apply(tree.functions, args); + } catch (e) { + throw { message: "error evaluating function `" + this.name + "`", + index: this.index }; + } + } else { // 2. 
+ return new(tree.Anonymous)(this.name + + "(" + args.map(function (a) { return a.toCSS() }).join(', ') + ")"); + } + }, + + toCSS: function (env) { + return this.eval(env).toCSS(); + } +}; + +})(require('less/tree')); +(function (tree) { +// +// RGB Colors - #ff0014, #eee +// +tree.Color = function (rgb, a) { + // + // The end goal here, is to parse the arguments + // into an integer triplet, such as `128, 255, 0` + // + // This facilitates operations and conversions. + // + if (Array.isArray(rgb)) { + this.rgb = rgb; + } else if (rgb.length == 6) { + this.rgb = rgb.match(/.{2}/g).map(function (c) { + return parseInt(c, 16); + }); + } else if (rgb.length == 8) { + this.alpha = parseInt(rgb.substring(0,2), 16) / 255.0; + this.rgb = rgb.substr(2).match(/.{2}/g).map(function (c) { + return parseInt(c, 16); + }); + } else { + this.rgb = rgb.split('').map(function (c) { + return parseInt(c + c, 16); + }); + } + this.alpha = typeof(a) === 'number' ? a : 1; +}; +tree.Color.prototype = { + eval: function () { return this }, + + // + // If we have some transparency, the only way to represent it + // is via `rgba`. Otherwise, we use the hex representation, + // which has better compatibility with older browsers. + // Values are capped between `0` and `255`, rounded and zero-padded. + // + toCSS: function () { + if (this.alpha < 1.0) { + return "rgba(" + this.rgb.map(function (c) { + return Math.round(c); + }).concat(this.alpha).join(', ') + ")"; + } else { + return '#' + this.rgb.map(function (i) { + i = Math.round(i); + i = (i > 255 ? 255 : (i < 0 ? 0 : i)).toString(16); + return i.length === 1 ? '0' + i : i; + }).join(''); + } + }, + + // + // Operations have to be done per-channel, if not, + // channels will spill onto each other. Once we have + // our result, in the form of an integer triplet, + // we create a new Color node to hold the result. + // + operate: function (op, other) { + var result = []; + + if (! 
(other instanceof tree.Color)) { + other = other.toColor(); + } + + for (var c = 0; c < 3; c++) { + result[c] = tree.operate(op, this.rgb[c], other.rgb[c]); + } + return new(tree.Color)(result, this.alpha + other.alpha); + }, + + toHSL: function () { + var r = this.rgb[0] / 255, + g = this.rgb[1] / 255, + b = this.rgb[2] / 255, + a = this.alpha; + + var max = Math.max(r, g, b), min = Math.min(r, g, b); + var h, s, l = (max + min) / 2, d = max - min; + + if (max === min) { + h = s = 0; + } else { + s = l > 0.5 ? d / (2 - max - min) : d / (max + min); + + switch (max) { + case r: h = (g - b) / d + (g < b ? 6 : 0); break; + case g: h = (b - r) / d + 2; break; + case b: h = (r - g) / d + 4; break; + } + h /= 6; + } + return { h: h * 360, s: s, l: l, a: a }; + } +}; + + +})(require('less/tree')); +(function (tree) { + +tree.Comment = function (value, silent) { + this.value = value; + this.silent = !!silent; +}; +tree.Comment.prototype = { + toCSS: function (env) { + return env.compress ? '' : this.value; + }, + eval: function () { return this } +}; + +})(require('less/tree')); +(function (tree) { + +// +// A number with a unit +// +tree.Dimension = function (value, unit) { + this.value = parseFloat(value); + this.unit = unit || null; +}; + +tree.Dimension.prototype = { + eval: function () { return this }, + toColor: function () { + return new(tree.Color)([this.value, this.value, this.value]); + }, + toCSS: function () { + var css = this.value + this.unit; + return css; + }, + + // In an operation between two Dimensions, + // we default to the first Dimension's unit, + // so `1px + 2em` will yield `3px`. + // In the future, we could implement some unit + // conversions such that `100cm + 10mm` would yield + // `101cm`. 
+ operate: function (op, other) { + return new(tree.Dimension) + (tree.operate(op, this.value, other.value), + this.unit || other.unit); + } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Directive = function (name, value) { + this.name = name; + if (Array.isArray(value)) { + this.ruleset = new(tree.Ruleset)([], value); + } else { + this.value = value; + } +}; +tree.Directive.prototype = { + toCSS: function (ctx, env) { + if (this.ruleset) { + this.ruleset.root = true; + return this.name + (env.compress ? '{' : ' {\n ') + + this.ruleset.toCSS(ctx, env).trim().replace(/\n/g, '\n ') + + (env.compress ? '}': '\n}\n'); + } else { + return this.name + ' ' + this.value.toCSS() + ';\n'; + } + }, + eval: function (env) { + env.frames.unshift(this); + this.ruleset = this.ruleset && this.ruleset.eval(env); + env.frames.shift(); + return this; + }, + variable: function (name) { return tree.Ruleset.prototype.variable.call(this.ruleset, name) }, + find: function () { return tree.Ruleset.prototype.find.apply(this.ruleset, arguments) }, + rulesets: function () { return tree.Ruleset.prototype.rulesets.apply(this.ruleset) } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Element = function (combinator, value) { + this.combinator = combinator instanceof tree.Combinator ? + combinator : new(tree.Combinator)(combinator); + this.value = value ? value.trim() : ""; +}; +tree.Element.prototype.toCSS = function (env) { + return this.combinator.toCSS(env || {}) + this.value; +}; + +tree.Combinator = function (value) { + if (value === ' ') { + this.value = ' '; + } else if (value === '& ') { + this.value = '& '; + } else { + this.value = value ? value.trim() : ""; + } +}; +tree.Combinator.prototype.toCSS = function (env) { + return { + '' : '', + ' ' : ' ', + '&' : '', + '& ' : ' ', + ':' : ' :', + '::': '::', + '+' : env.compress ? '+' : ' + ', + '~' : env.compress ? '~' : ' ~ ', + '>' : env.compress ? 
'>' : ' > ' + }[this.value]; +}; + +})(require('less/tree')); +(function (tree) { + +tree.Expression = function (value) { this.value = value }; +tree.Expression.prototype = { + eval: function (env) { + if (this.value.length > 1) { + return new(tree.Expression)(this.value.map(function (e) { + return e.eval(env); + })); + } else if (this.value.length === 1) { + return this.value[0].eval(env); + } else { + return this; + } + }, + toCSS: function (env) { + return this.value.map(function (e) { + return e.toCSS(env); + }).join(' '); + } +}; + +})(require('less/tree')); +(function (tree) { +// +// CSS @import node +// +// The general strategy here is that we don't want to wait +// for the parsing to be completed, before we start importing +// the file. That's because in the context of a browser, +// most of the time will be spent waiting for the server to respond. +// +// On creation, we push the import path to our import queue, though +// `import,push`, we also pass it a callback, which it'll call once +// the file has been fetched, and parsed. +// +tree.Import = function (path, imports) { + var that = this; + + this._path = path; + + // The '.less' extension is optional + if (path instanceof tree.Quoted) { + this.path = /\.(le?|c)ss$/.test(path.value) ? path.value : path.value + '.less'; + } else { + this.path = path.value.value || path.value; + } + + this.css = /css$/.test(this.path); + + // Only pre-compile .less files + if (! this.css) { + imports.push(this.path, function (root) { + if (! root) { + throw new(Error)("Error parsing " + that.path); + } + that.root = root; + }); + } +}; + +// +// The actual import node doesn't return anything, when converted to CSS. +// The reason is that it's used at the evaluation stage, so that the rules +// it imports can be treated like any other rules. +// +// In `eval`, we make sure all Import nodes get evaluated, recursively, so +// we end up with a flat structure, which can easily be imported in the parent +// ruleset. 
+// +tree.Import.prototype = { + toCSS: function () { + if (this.css) { + return "@import " + this._path.toCSS() + ';\n'; + } else { + return ""; + } + }, + eval: function (env) { + var ruleset; + + if (this.css) { + return this; + } else { + ruleset = new(tree.Ruleset)(null, this.root.rules.slice(0)); + + for (var i = 0; i < ruleset.rules.length; i++) { + if (ruleset.rules[i] instanceof tree.Import) { + Array.prototype + .splice + .apply(ruleset.rules, + [i, 1].concat(ruleset.rules[i].eval(env))); + } + } + return ruleset.rules; + } + } +}; + +})(require('less/tree')); +(function (tree) { + +tree.JavaScript = function (string, index, escaped) { + this.escaped = escaped; + this.expression = string; + this.index = index; +}; +tree.JavaScript.prototype = { + eval: function (env) { + var result, + that = this, + context = {}; + + var expression = this.expression.replace(/@\{([\w-]+)\}/g, function (_, name) { + return tree.jsify(new(tree.Variable)('@' + name, that.index).eval(env)); + }); + + try { + expression = new(Function)('return (' + expression + ')'); + } catch (e) { + throw { message: "JavaScript evaluation error: `" + expression + "`" , + index: this.index }; + } + + for (var k in env.frames[0].variables()) { + context[k.slice(1)] = { + value: env.frames[0].variables()[k].value, + toJS: function () { + return this.value.eval(env).toCSS(); + } + }; + } + + try { + result = expression.call(context); + } catch (e) { + throw { message: "JavaScript evaluation error: '" + e.name + ': ' + e.message + "'" , + index: this.index }; + } + if (typeof(result) === 'string') { + return new(tree.Quoted)('"' + result + '"', result, this.escaped, this.index); + } else if (Array.isArray(result)) { + return new(tree.Anonymous)(result.join(', ')); + } else { + return new(tree.Anonymous)(result); + } + } +}; + +})(require('less/tree')); + +(function (tree) { + +tree.Keyword = function (value) { this.value = value }; +tree.Keyword.prototype = { + eval: function () { return this }, + 
toCSS: function () { return this.value } +}; + +})(require('less/tree')); +(function (tree) { + +tree.mixin = {}; +tree.mixin.Call = function (elements, args, index) { + this.selector = new(tree.Selector)(elements); + this.arguments = args; + this.index = index; +}; +tree.mixin.Call.prototype = { + eval: function (env) { + var mixins, args, rules = [], match = false; + + for (var i = 0; i < env.frames.length; i++) { + if ((mixins = env.frames[i].find(this.selector)).length > 0) { + args = this.arguments && this.arguments.map(function (a) { return a.eval(env) }); + for (var m = 0; m < mixins.length; m++) { + if (mixins[m].match(args, env)) { + try { + Array.prototype.push.apply( + rules, mixins[m].eval(env, this.arguments).rules); + match = true; + } catch (e) { + throw { message: e.message, index: e.index, stack: e.stack, call: this.index }; + } + } + } + if (match) { + return rules; + } else { + throw { message: 'No matching definition was found for `' + + this.selector.toCSS().trim() + '(' + + this.arguments.map(function (a) { + return a.toCSS(); + }).join(', ') + ")`", + index: this.index }; + } + } + } + throw { message: this.selector.toCSS().trim() + " is undefined", + index: this.index }; + } +}; + +tree.mixin.Definition = function (name, params, rules) { + this.name = name; + this.selectors = [new(tree.Selector)([new(tree.Element)(null, name)])]; + this.params = params; + this.arity = params.length; + this.rules = rules; + this._lookups = {}; + this.required = params.reduce(function (count, p) { + if (!p.name || (p.name && !p.value)) { return count + 1 } + else { return count } + }, 0); + this.parent = tree.Ruleset.prototype; + this.frames = []; +}; +tree.mixin.Definition.prototype = { + toCSS: function () { return "" }, + variable: function (name) { return this.parent.variable.call(this, name) }, + variables: function () { return this.parent.variables.call(this) }, + find: function () { return this.parent.find.apply(this, arguments) }, + rulesets: function 
() { return this.parent.rulesets.apply(this) }, + + eval: function (env, args) { + var frame = new(tree.Ruleset)(null, []), context, _arguments = []; + + for (var i = 0, val; i < this.params.length; i++) { + if (this.params[i].name) { + if (val = (args && args[i]) || this.params[i].value) { + frame.rules.unshift(new(tree.Rule)(this.params[i].name, val.eval(env))); + } else { + throw { message: "wrong number of arguments for " + this.name + + ' (' + args.length + ' for ' + this.arity + ')' }; + } + } + } + for (var i = 0; i < Math.max(this.params.length, args && args.length); i++) { + _arguments.push(args[i] || this.params[i].value); + } + frame.rules.unshift(new(tree.Rule)('@arguments', new(tree.Expression)(_arguments).eval(env))); + + return new(tree.Ruleset)(null, this.rules.slice(0)).eval({ + frames: [this, frame].concat(this.frames, env.frames) + }); + }, + match: function (args, env) { + var argsLength = (args && args.length) || 0, len; + + if (argsLength < this.required) { return false } + if ((this.required > 0) && (argsLength > this.params.length)) { return false } + + len = Math.min(argsLength, this.arity); + + for (var i = 0; i < len; i++) { + if (!this.params[i].name) { + if (args[i].eval(env).toCSS() != this.params[i].value.eval(env).toCSS()) { + return false; + } + } + } + return true; + } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Operation = function (op, operands) { + this.op = op.trim(); + this.operands = operands; +}; +tree.Operation.prototype.eval = function (env) { + var a = this.operands[0].eval(env), + b = this.operands[1].eval(env), + temp; + + if (a instanceof tree.Dimension && b instanceof tree.Color) { + if (this.op === '*' || this.op === '+') { + temp = b, b = a, a = temp; + } else { + throw { name: "OperationError", + message: "Can't substract or divide a color from a number" }; + } + } + return a.operate(this.op, b); +}; + +tree.operate = function (op, a, b) { + switch (op) { + case '+': return a + b; + case '-': 
return a - b; + case '*': return a * b; + case '/': return a / b; + } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Quoted = function (str, content, escaped, i) { + this.escaped = escaped; + this.value = content || ''; + this.quote = str.charAt(0); + this.index = i; +}; +tree.Quoted.prototype = { + toCSS: function () { + if (this.escaped) { + return this.value; + } else { + return this.quote + this.value + this.quote; + } + }, + eval: function (env) { + var that = this; + var value = this.value.replace(/`([^`]+)`/g, function (_, exp) { + return new(tree.JavaScript)(exp, that.index, true).eval(env).value; + }).replace(/@\{([\w-]+)\}/g, function (_, name) { + var v = new(tree.Variable)('@' + name, that.index).eval(env); + return v.value || v.toCSS(); + }); + return new(tree.Quoted)(this.quote + value + this.quote, value, this.escaped, this.index); + } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Rule = function (name, value, important, index) { + this.name = name; + this.value = (value instanceof tree.Value) ? value : new(tree.Value)([value]); + this.important = important ? ' ' + important.trim() : ''; + this.index = index; + + if (name.charAt(0) === '@') { + this.variable = true; + } else { this.variable = false } +}; +tree.Rule.prototype.toCSS = function (env) { + if (this.variable) { return "" } + else { + return this.name + (env.compress ? 
':' : ': ') + + this.value.toCSS(env) + + this.important + ";"; + } +}; + +tree.Rule.prototype.eval = function (context) { + return new(tree.Rule)(this.name, this.value.eval(context), this.important, this.index); +}; + +tree.Shorthand = function (a, b) { + this.a = a; + this.b = b; +}; + +tree.Shorthand.prototype = { + toCSS: function (env) { + return this.a.toCSS(env) + "/" + this.b.toCSS(env); + }, + eval: function () { return this } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Ruleset = function (selectors, rules) { + this.selectors = selectors; + this.rules = rules; + this._lookups = {}; +}; +tree.Ruleset.prototype = { + eval: function (env) { + var ruleset = new(tree.Ruleset)(this.selectors, this.rules.slice(0)); + + ruleset.root = this.root; + + // push the current ruleset to the frames stack + env.frames.unshift(ruleset); + + // Evaluate imports + if (ruleset.root) { + for (var i = 0; i < ruleset.rules.length; i++) { + if (ruleset.rules[i] instanceof tree.Import) { + Array.prototype.splice + .apply(ruleset.rules, [i, 1].concat(ruleset.rules[i].eval(env))); + } + } + } + + // Store the frames around mixin definitions, + // so they can be evaluated like closures when the time comes. + for (var i = 0; i < ruleset.rules.length; i++) { + if (ruleset.rules[i] instanceof tree.mixin.Definition) { + ruleset.rules[i].frames = env.frames.slice(0); + } + } + + // Evaluate mixin calls. + for (var i = 0; i < ruleset.rules.length; i++) { + if (ruleset.rules[i] instanceof tree.mixin.Call) { + Array.prototype.splice + .apply(ruleset.rules, [i, 1].concat(ruleset.rules[i].eval(env))); + } + } + + // Evaluate everything else + for (var i = 0, rule; i < ruleset.rules.length; i++) { + rule = ruleset.rules[i]; + + if (! (rule instanceof tree.mixin.Definition)) { + ruleset.rules[i] = rule.eval ? 
rule.eval(env) : rule; + } + } + + // Pop the stack + env.frames.shift(); + + return ruleset; + }, + match: function (args) { + return !args || args.length === 0; + }, + variables: function () { + if (this._variables) { return this._variables } + else { + return this._variables = this.rules.reduce(function (hash, r) { + if (r instanceof tree.Rule && r.variable === true) { + hash[r.name] = r; + } + return hash; + }, {}); + } + }, + variable: function (name) { + return this.variables()[name]; + }, + rulesets: function () { + if (this._rulesets) { return this._rulesets } + else { + return this._rulesets = this.rules.filter(function (r) { + return (r instanceof tree.Ruleset) || (r instanceof tree.mixin.Definition); + }); + } + }, + find: function (selector, self) { + self = self || this; + var rules = [], rule, match, + key = selector.toCSS(); + + if (key in this._lookups) { return this._lookups[key] } + + this.rulesets().forEach(function (rule) { + if (rule !== self) { + for (var j = 0; j < rule.selectors.length; j++) { + if (match = selector.match(rule.selectors[j])) { + if (selector.elements.length > 1) { + Array.prototype.push.apply(rules, rule.find( + new(tree.Selector)(selector.elements.slice(1)), self)); + } else { + rules.push(rule); + } + break; + } + } + } + }); + return this._lookups[key] = rules; + }, + // + // Entry point for code generation + // + // `context` holds an array of arrays. + // + toCSS: function (context, env) { + var css = [], // The CSS output + rules = [], // node.Rule instances + rulesets = [], // node.Ruleset instances + paths = [], // Current selectors + selector, // The fully rendered selector + rule; + + if (! 
this.root) { + if (context.length === 0) { + paths = this.selectors.map(function (s) { return [s] }); + } else { + this.joinSelectors( paths, context, this.selectors ); + } + } + + // Compile rules and rulesets + for (var i = 0; i < this.rules.length; i++) { + rule = this.rules[i]; + + if (rule.rules || (rule instanceof tree.Directive)) { + rulesets.push(rule.toCSS(paths, env)); + } else if (rule instanceof tree.Comment) { + if (!rule.silent) { + if (this.root) { + rulesets.push(rule.toCSS(env)); + } else { + rules.push(rule.toCSS(env)); + } + } + } else { + if (rule.toCSS && !rule.variable) { + rules.push(rule.toCSS(env)); + } else if (rule.value && !rule.variable) { + rules.push(rule.value.toString()); + } + } + } + + rulesets = rulesets.join(''); + + // If this is the root node, we don't render + // a selector, or {}. + // Otherwise, only output if this ruleset has rules. + if (this.root) { + css.push(rules.join(env.compress ? '' : '\n')); + } else { + if (rules.length > 0) { + selector = paths.map(function (p) { + return p.map(function (s) { + return s.toCSS(env); + }).join('').trim(); + }).join(env.compress ? ',' : (paths.length > 3 ? ',\n' : ', ')); + css.push(selector, + (env.compress ? '{' : ' {\n ') + + rules.join(env.compress ? '' : '\n ') + + (env.compress ? '}' : '\n}\n')); + } + } + css.push(rulesets); + + return css.join('') + (env.compress ? 
'\n' : ''); + }, + + joinSelectors: function( paths, context, selectors ) { + for (var s = 0; s < selectors.length; s++) { + this.joinSelector(paths, context, selectors[s]); + } + }, + + joinSelector: function( paths, context, selector ) { + var before = [], after = [], beforeElements = [], afterElements = [], hasParentSelector = false, el; + + for (var i = 0; i < selector.elements.length; i++) { + el = selector.elements[i]; + if (el.combinator.value[0] === '&') { + hasParentSelector = true; + } + if(!hasParentSelector) { + beforeElements.push(el); + } else { + afterElements.push(el); + } + } + + if(!hasParentSelector) { + afterElements = beforeElements; + beforeElements = []; + } + + if(beforeElements.length > 0) { + before.push(new (tree.Selector)(beforeElements)); + } + if(afterElements.length > 0) { + after.push(new (tree.Selector)(afterElements)); + } + + for (var c = 0; c < context.length; c++) { + paths.push(before.concat(context[c]).concat(after)); + } + } +}; +})(require('less/tree')); +(function (tree) { + +tree.Selector = function (elements) { + this.elements = elements; + if (this.elements[0].combinator.value === "") { + this.elements[0].combinator.value = ' '; + } +}; +tree.Selector.prototype.match = function (other) { + if (this.elements[0].value === other.elements[0].value) { + return true; + } else { + return false; + } +}; +tree.Selector.prototype.toCSS = function (env) { + if (this._css) { return this._css } + + return this._css = this.elements.map(function (e) { + if (typeof(e) === 'string') { + return ' ' + e.trim(); + } else { + return e.toCSS(env); + } + }).join(''); +}; + +})(require('less/tree')); +(function (tree) { + +tree.URL = function (val, paths) { + if (val.data) { + this.attrs = val; + } else { + // Add the base path if the URL is relative and we are in the browser + if (!/^(?:https?:\/|file:\/|data:\/)?\//.test(val.value) && paths.length > 0 && typeof(window) !== 'undefined') { + val.value = paths[0] + (val.value.charAt(0) === '/' ? 
val.value.slice(1) : val.value); + } + this.value = val; + this.paths = paths; + } +}; +tree.URL.prototype = { + toCSS: function () { + return "url(" + (this.attrs ? 'data:' + this.attrs.mime + this.attrs.charset + this.attrs.base64 + this.attrs.data + : this.value.toCSS()) + ")"; + }, + eval: function (ctx) { + return this.attrs ? this : new(tree.URL)(this.value.eval(ctx), this.paths); + } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Value = function (value) { + this.value = value; + this.is = 'value'; +}; +tree.Value.prototype = { + eval: function (env) { + if (this.value.length === 1) { + return this.value[0].eval(env); + } else { + return new(tree.Value)(this.value.map(function (v) { + return v.eval(env); + })); + } + }, + toCSS: function (env) { + return this.value.map(function (e) { + return e.toCSS(env); + }).join(env.compress ? ',' : ', '); + } +}; + +})(require('less/tree')); +(function (tree) { + +tree.Variable = function (name, index) { this.name = name, this.index = index }; +tree.Variable.prototype = { + eval: function (env) { + var variable, v, name = this.name; + + if (name.indexOf('@@') == 0) { + name = '@' + new(tree.Variable)(name.slice(1)).eval(env).value; + } + + if (variable = tree.find(env.frames, function (frame) { + if (v = frame.variable(name)) { + return v.value.eval(env); + } + })) { return variable } + else { + throw { message: "variable " + name + " is undefined", + index: this.index }; + } + } +}; + +})(require('less/tree')); +require('less/tree').find = function (obj, fun) { + for (var i = 0, r; i < obj.length; i++) { + if (r = fun.call(obj, obj[i])) { return r } + } + return null; +}; +require('less/tree').jsify = function (obj) { + if (Array.isArray(obj.value) && (obj.value.length > 1)) { + return '[' + obj.value.map(function (v) { return v.toCSS(false) }).join(', ') + ']'; + } else { + return obj.toCSS(false); + } +}; +var name; + +function loadStyleSheet(sheet, callback, reload, remaining) { + var sheetName = 
name.slice(0, name.lastIndexOf('/') + 1) + sheet.href; + var input = readFile(sheetName); + var parser = new less.Parser(); + parser.parse(input, function (e, root) { + if (e) { + print("Error: " + e); + quit(1); + } + callback(root, sheet, { local: false, lastModified: 0, remaining: remaining }); + }); + + // callback({}, sheet, { local: true, remaining: remaining }); +} + +function writeFile(filename, content) { + var fstream = new java.io.FileWriter(filename); + var out = new java.io.BufferedWriter(fstream); + out.write(content); + out.close(); +} + +// Command line integration via Rhino +(function (args) { + name = args[0]; + var output = args[1]; + + if (!name) { + print('No files present in the fileset; Check your pattern match in build.xml'); + quit(1); + } + path = name.split("/");path.pop();path=path.join("/") + + var input = readFile(name); + + if (!input) { + print('lesscss: couldn\'t open file ' + name); + quit(1); + } + + var result; + var parser = new less.Parser(); + parser.parse(input, function (e, root) { + if (e) { + quit(1); + } else { + result = root.toCSS(); + if (output) { + writeFile(output, result); + print("Written to " + output); + } else { + print(result); + } + quit(0); + } + }); + print("done"); +}(arguments)); diff --git a/src/dashboard/src/media/vendor/less.js/lib/less/browser.js b/src/dashboard/src/media/vendor/less.js/lib/less/browser.js new file mode 100644 index 0000000000..cba4c3b61d --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/lib/less/browser.js @@ -0,0 +1,375 @@ +// +// browser.js - client-side engine +// + +var isFileProtocol = (location.protocol === 'file:' || + location.protocol === 'chrome:' || + location.protocol === 'chrome-extension:' || + location.protocol === 'resource:'); + +less.env = less.env || (location.hostname == '127.0.0.1' || + location.hostname == '0.0.0.0' || + location.hostname == 'localhost' || + location.port.length > 0 || + isFileProtocol ? 
'development' + : 'production'); + +// Load styles asynchronously (default: false) +// +// This is set to `false` by default, so that the body +// doesn't start loading before the stylesheets are parsed. +// Setting this to `true` can result in flickering. +// +less.async = false; + +// Interval between watch polls +less.poll = less.poll || (isFileProtocol ? 1000 : 1500); + +// +// Watch mode +// +less.watch = function () { return this.watchMode = true }; +less.unwatch = function () { return this.watchMode = false }; + +if (less.env === 'development') { + less.optimization = 0; + + if (/!watch/.test(location.hash)) { + less.watch(); + } + less.watchTimer = setInterval(function () { + if (less.watchMode) { + loadStyleSheets(function (root, sheet, env) { + if (root) { + createCSS(root.toCSS(), sheet, env.lastModified); + } + }); + } + }, less.poll); +} else { + less.optimization = 3; +} + +var cache; + +try { + cache = (typeof(window.localStorage) === 'undefined') ? null : window.localStorage; +} catch (_) { + cache = null; +} + +// +// Get all tags with the 'rel' attribute set to "stylesheet/less" +// +var links = document.getElementsByTagName('link'); +var typePattern = /^text\/(x-)?less$/; + +less.sheets = []; + +for (var i = 0; i < links.length; i++) { + if (links[i].rel === 'stylesheet/less' || (links[i].rel.match(/stylesheet/) && + (links[i].type.match(typePattern)))) { + less.sheets.push(links[i]); + } +} + + +less.refresh = function (reload) { + var startTime, endTime; + startTime = endTime = new(Date); + + loadStyleSheets(function (root, sheet, env) { + if (env.local) { + log("loading " + sheet.href + " from cache."); + } else { + log("parsed " + sheet.href + " successfully."); + createCSS(root.toCSS(), sheet, env.lastModified); + } + log("css for " + sheet.href + " generated in " + (new(Date) - endTime) + 'ms'); + (env.remaining === 0) && log("css generated in " + (new(Date) - startTime) + 'ms'); + endTime = new(Date); + }, reload); + + loadStyles(); +}; 
+less.refreshStyles = loadStyles; + +less.refresh(less.env === 'development'); + +function loadStyles() { + var styles = document.getElementsByTagName('style'); + for (var i = 0; i < styles.length; i++) { + if (styles[i].type.match(typePattern)) { + new(less.Parser)().parse(styles[i].innerHTML || '', function (e, tree) { + var css = tree.toCSS(); + var style = styles[i]; + try { + style.innerHTML = css; + } catch (_) { + style.styleSheets.cssText = css; + } + style.type = 'text/css'; + }); + } + } +} + +function loadStyleSheets(callback, reload) { + for (var i = 0; i < less.sheets.length; i++) { + loadStyleSheet(less.sheets[i], callback, reload, less.sheets.length - (i + 1)); + } +} + +function loadStyleSheet(sheet, callback, reload, remaining) { + var url = window.location.href.replace(/[#?].*$/, ''); + var href = sheet.href.replace(/\?.*$/, ''); + var css = cache && cache.getItem(href); + var timestamp = cache && cache.getItem(href + ':timestamp'); + var styles = { css: css, timestamp: timestamp }; + + // Stylesheets in IE don't always return the full path + if (! 
/^(https?|file):/.test(href)) { + if (href.charAt(0) == "/") { + href = window.location.protocol + "//" + window.location.host + href; + } else { + href = url.slice(0, url.lastIndexOf('/') + 1) + href; + } + } + + xhr(sheet.href, sheet.type, function (data, lastModified) { + if (!reload && styles && lastModified && + (new(Date)(lastModified).valueOf() === + new(Date)(styles.timestamp).valueOf())) { + // Use local copy + createCSS(styles.css, sheet); + callback(null, sheet, { local: true, remaining: remaining }); + } else { + // Use remote copy (re-parse) + try { + new(less.Parser)({ + optimization: less.optimization, + paths: [href.replace(/[\w\.-]+$/, '')], + mime: sheet.type + }).parse(data, function (e, root) { + if (e) { return error(e, href) } + try { + callback(root, sheet, { local: false, lastModified: lastModified, remaining: remaining }); + removeNode(document.getElementById('less-error-message:' + extractId(href))); + } catch (e) { + error(e, href); + } + }); + } catch (e) { + error(e, href); + } + } + }, function (status, url) { + throw new(Error)("Couldn't load " + url + " (" + status + ")"); + }); +} + +function extractId(href) { + return href.replace(/^[a-z]+:\/\/?[^\/]+/, '' ) // Remove protocol & domain + .replace(/^\//, '' ) // Remove root / + .replace(/\?.*$/, '' ) // Remove query + .replace(/\.[^\.\/]+$/, '' ) // Remove file extension + .replace(/[^\.\w-]+/g, '-') // Replace illegal characters + .replace(/\./g, ':'); // Replace dots with colons(for valid id) +} + +function createCSS(styles, sheet, lastModified) { + var css; + + // Strip the query-string + var href = sheet.href ? 
sheet.href.replace(/\?.*$/, '') : ''; + + // If there is no title set, use the filename, minus the extension + var id = 'less:' + (sheet.title || extractId(href)); + + // If the stylesheet doesn't exist, create a new node + if ((css = document.getElementById(id)) === null) { + css = document.createElement('style'); + css.type = 'text/css'; + css.media = sheet.media || 'screen'; + css.id = id; + document.getElementsByTagName('head')[0].appendChild(css); + } + + if (css.styleSheet) { // IE + try { + css.styleSheet.cssText = styles; + } catch (e) { + throw new(Error)("Couldn't reassign styleSheet.cssText."); + } + } else { + (function (node) { + if (css.childNodes.length > 0) { + if (css.firstChild.nodeValue !== node.nodeValue) { + css.replaceChild(node, css.firstChild); + } + } else { + css.appendChild(node); + } + })(document.createTextNode(styles)); + } + + // Don't update the local store if the file wasn't modified + if (lastModified && cache) { + log('saving ' + href + ' to cache.'); + cache.setItem(href, styles); + cache.setItem(href + ':timestamp', lastModified); + } +} + +function xhr(url, type, callback, errback) { + var xhr = getXMLHttpRequest(); + var async = isFileProtocol ? 
false : less.async; + + if (typeof(xhr.overrideMimeType) === 'function') { + xhr.overrideMimeType('text/css'); + } + xhr.open('GET', url, async); + xhr.setRequestHeader('Accept', type || 'text/x-less, text/css; q=0.9, */*; q=0.5'); + xhr.send(null); + + if (isFileProtocol) { + if (xhr.status === 0) { + callback(xhr.responseText); + } else { + errback(xhr.status, url); + } + } else if (async) { + xhr.onreadystatechange = function () { + if (xhr.readyState == 4) { + handleResponse(xhr, callback, errback); + } + }; + } else { + handleResponse(xhr, callback, errback); + } + + function handleResponse(xhr, callback, errback) { + if (xhr.status >= 200 && xhr.status < 300) { + callback(xhr.responseText, + xhr.getResponseHeader("Last-Modified")); + } else if (typeof(errback) === 'function') { + errback(xhr.status, url); + } + } +} + +function getXMLHttpRequest() { + if (window.XMLHttpRequest) { + return new(XMLHttpRequest); + } else { + try { + return new(ActiveXObject)("MSXML2.XMLHTTP.3.0"); + } catch (e) { + log("browser doesn't support AJAX."); + return null; + } + } +} + +function removeNode(node) { + return node && node.parentNode.removeChild(node); +} + +function log(str) { + if (less.env == 'development' && typeof(console) !== "undefined") { console.log('less: ' + str) } +} + +function error(e, href) { + var id = 'less-error-message:' + extractId(href); + + var template = ['
      ', + '
    • {0}
    • ', + '
    • {current}
    • ', + '
    • {2}
    • ', + '
    '].join('\n'); + + var elem = document.createElement('div'), timer, content; + + elem.id = id; + elem.className = "less-error-message"; + + content = '

    ' + (e.message || 'There is an error in your .less file') + + '

    ' + '

    ' + href + " "; + + if (e.extract) { + content += 'on line ' + e.line + ', column ' + (e.column + 1) + ':

    ' + + template.replace(/\[(-?\d)\]/g, function (_, i) { + return (parseInt(e.line) + parseInt(i)) || ''; + }).replace(/\{(\d)\}/g, function (_, i) { + return e.extract[parseInt(i)] || ''; + }).replace(/\{current\}/, e.extract[1].slice(0, e.column) + '' + + e.extract[1].slice(e.column) + ''); + } + elem.innerHTML = content; + + // CSS for error messages + createCSS([ + '.less-error-message ul, .less-error-message li {', + 'list-style-type: none;', + 'margin-right: 15px;', + 'padding: 4px 0;', + 'margin: 0;', + '}', + '.less-error-message label {', + 'font-size: 12px;', + 'margin-right: 15px;', + 'padding: 4px 0;', + 'color: #cc7777;', + '}', + '.less-error-message pre {', + 'color: #ee4444;', + 'padding: 4px 0;', + 'margin: 0;', + 'display: inline-block;', + '}', + '.less-error-message pre.ctx {', + 'color: #dd4444;', + '}', + '.less-error-message h3 {', + 'font-size: 20px;', + 'font-weight: bold;', + 'padding: 15px 0 5px 0;', + 'margin: 0;', + '}', + '.less-error-message a {', + 'color: #10a', + '}', + '.less-error-message .error {', + 'color: red;', + 'font-weight: bold;', + 'padding-bottom: 2px;', + 'border-bottom: 1px dashed red;', + '}' + ].join('\n'), { title: 'error-message' }); + + elem.style.cssText = [ + "font-family: Arial, sans-serif", + "border: 1px solid #e00", + "background-color: #eee", + "border-radius: 5px", + "-webkit-border-radius: 5px", + "-moz-border-radius: 5px", + "color: #e00", + "padding: 15px", + "margin-bottom: 15px" + ].join(';'); + + if (less.env == 'development') { + timer = setInterval(function () { + if (document.body) { + if (document.getElementById(id)) { + document.body.replaceChild(elem, document.getElementById(id)); + } else { + document.body.insertBefore(elem, document.body.firstChild); + } + clearInterval(timer); + } + }, 10); + } +} + diff --git a/src/dashboard/src/media/vendor/less.js/lib/less/functions.js b/src/dashboard/src/media/vendor/less.js/lib/less/functions.js new file mode 100644 index 0000000000..fc9d86f1df --- 
/dev/null +++ b/src/dashboard/src/media/vendor/less.js/lib/less/functions.js @@ -0,0 +1,185 @@ +(function (tree) { + +tree.functions = { + rgb: function (r, g, b) { + return this.rgba(r, g, b, 1.0); + }, + rgba: function (r, g, b, a) { + var rgb = [r, g, b].map(function (c) { return number(c) }), + a = number(a); + return new(tree.Color)(rgb, a); + }, + hsl: function (h, s, l) { + return this.hsla(h, s, l, 1.0); + }, + hsla: function (h, s, l, a) { + h = (number(h) % 360) / 360; + s = number(s); l = number(l); a = number(a); + + var m2 = l <= 0.5 ? l * (s + 1) : l + s - l * s; + var m1 = l * 2 - m2; + + return this.rgba(hue(h + 1/3) * 255, + hue(h) * 255, + hue(h - 1/3) * 255, + a); + + function hue(h) { + h = h < 0 ? h + 1 : (h > 1 ? h - 1 : h); + if (h * 6 < 1) return m1 + (m2 - m1) * h * 6; + else if (h * 2 < 1) return m2; + else if (h * 3 < 2) return m1 + (m2 - m1) * (2/3 - h) * 6; + else return m1; + } + }, + hue: function (color) { + return new(tree.Dimension)(Math.round(color.toHSL().h)); + }, + saturation: function (color) { + return new(tree.Dimension)(Math.round(color.toHSL().s * 100), '%'); + }, + lightness: function (color) { + return new(tree.Dimension)(Math.round(color.toHSL().l * 100), '%'); + }, + alpha: function (color) { + return new(tree.Dimension)(color.toHSL().a); + }, + saturate: function (color, amount) { + var hsl = color.toHSL(); + + hsl.s += amount.value / 100; + hsl.s = clamp(hsl.s); + return hsla(hsl); + }, + desaturate: function (color, amount) { + var hsl = color.toHSL(); + + hsl.s -= amount.value / 100; + hsl.s = clamp(hsl.s); + return hsla(hsl); + }, + lighten: function (color, amount) { + var hsl = color.toHSL(); + + hsl.l += amount.value / 100; + hsl.l = clamp(hsl.l); + return hsla(hsl); + }, + darken: function (color, amount) { + var hsl = color.toHSL(); + + hsl.l -= amount.value / 100; + hsl.l = clamp(hsl.l); + return hsla(hsl); + }, + fadein: function (color, amount) { + var hsl = color.toHSL(); + + hsl.a += amount.value / 100; 
+ hsl.a = clamp(hsl.a); + return hsla(hsl); + }, + fadeout: function (color, amount) { + var hsl = color.toHSL(); + + hsl.a -= amount.value / 100; + hsl.a = clamp(hsl.a); + return hsla(hsl); + }, + fade: function (color, amount) { + var hsl = color.toHSL(); + + hsl.a = amount.value / 100; + hsl.a = clamp(hsl.a); + return hsla(hsl); + }, + spin: function (color, amount) { + var hsl = color.toHSL(); + var hue = (hsl.h + amount.value) % 360; + + hsl.h = hue < 0 ? 360 + hue : hue; + + return hsla(hsl); + }, + // + // Copyright (c) 2006-2009 Hampton Catlin, Nathan Weizenbaum, and Chris Eppstein + // http://sass-lang.com + // + mix: function (color1, color2, weight) { + var p = weight.value / 100.0; + var w = p * 2 - 1; + var a = color1.toHSL().a - color2.toHSL().a; + + var w1 = (((w * a == -1) ? w : (w + a) / (1 + w * a)) + 1) / 2.0; + var w2 = 1 - w1; + + var rgb = [color1.rgb[0] * w1 + color2.rgb[0] * w2, + color1.rgb[1] * w1 + color2.rgb[1] * w2, + color1.rgb[2] * w1 + color2.rgb[2] * w2]; + + var alpha = color1.alpha * p + color2.alpha * (1 - p); + + return new(tree.Color)(rgb, alpha); + }, + greyscale: function (color) { + return this.desaturate(color, new(tree.Dimension)(100)); + }, + e: function (str) { + return new(tree.Anonymous)(str instanceof tree.JavaScript ? str.evaluated : str); + }, + escape: function (str) { + return new(tree.Anonymous)(encodeURI(str.value).replace(/=/g, "%3D").replace(/:/g, "%3A").replace(/#/g, "%23").replace(/;/g, "%3B").replace(/\(/g, "%28").replace(/\)/g, "%29")); + }, + '%': function (quoted /* arg, arg, ...*/) { + var args = Array.prototype.slice.call(arguments, 1), + str = quoted.value; + + for (var i = 0; i < args.length; i++) { + str = str.replace(/%[sda]/i, function(token) { + var value = token.match(/s/i) ? args[i].value : args[i].toCSS(); + return token.match(/[A-Z]$/) ? 
encodeURIComponent(value) : value; + }); + } + str = str.replace(/%%/g, '%'); + return new(tree.Quoted)('"' + str + '"', str); + }, + round: function (n) { + if (n instanceof tree.Dimension) { + return new(tree.Dimension)(Math.round(number(n)), n.unit); + } else if (typeof(n) === 'number') { + return Math.round(n); + } else { + throw { + error: "RuntimeError", + message: "math functions take numbers as parameters" + }; + } + }, + argb: function (color) { + return new(tree.Anonymous)(color.toARGB()); + + } +}; + +function hsla(hsla) { + return tree.functions.hsla(hsla.h, hsla.s, hsla.l, hsla.a); +} + +function number(n) { + if (n instanceof tree.Dimension) { + return parseFloat(n.unit == '%' ? n.value / 100 : n.value); + } else if (typeof(n) === 'number') { + return n; + } else { + throw { + error: "RuntimeError", + message: "color functions take numbers as parameters" + }; + } +} + +function clamp(val) { + return Math.min(1, Math.max(0, val)); +} + +})(require('less/tree')); diff --git a/src/dashboard/src/media/vendor/less.js/lib/less/index.js b/src/dashboard/src/media/vendor/less.js/lib/less/index.js new file mode 100644 index 0000000000..39c40ca2fa --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/lib/less/index.js @@ -0,0 +1,139 @@ +var path = require('path'), + sys = require('sys'), + fs = require('fs'); + +require.paths.unshift(path.join(__dirname, '..')); + +var less = { + version: [1, 1, 3], + Parser: require('less/parser').Parser, + importer: require('less/parser').importer, + tree: require('less/tree'), + render: function (input, options, callback) { + options = options || {}; + + if (typeof(options) === 'function') { + callback = options, options = {}; + } + + var parser = new(this.Parser)(options), + ee; + + if (callback) { + parser.parse(input, function (e, root) { + callback(e, root.toCSS(options)); + }); + } else { + ee = new(require('events').EventEmitter); + + process.nextTick(function () { + parser.parse(input, function (e, root) { + if 
(e) { ee.emit('error', e) } + else { ee.emit('success', root.toCSS(options)) } + }); + }); + return ee; + } + }, + writeError: function (ctx, options) { + var message = ""; + var extract = ctx.extract; + var error = []; + var stylize = options.color ? less.stylize : function (str) { return str }; + + options = options || {}; + + if (options.silent) { return } + + if (!ctx.index) { + return sys.error(ctx.stack || ctx.message); + } + + if (typeof(extract[0]) === 'string') { + error.push(stylize((ctx.line - 1) + ' ' + extract[0], 'grey')); + } + + error.push(ctx.line + ' ' + extract[1].slice(0, ctx.column) + + stylize(stylize(extract[1][ctx.column], 'bold') + + extract[1].slice(ctx.column + 1), 'yellow')); + + if (typeof(extract[2]) === 'string') { + error.push(stylize((ctx.line + 1) + ' ' + extract[2], 'grey')); + } + error = error.join('\n') + '\033[0m\n'; + + message += stylize(ctx.message, 'red'); + ctx.filename && (message += stylize(' in ', 'red') + ctx.filename); + + sys.error(message, error); + + if (ctx.callLine) { + sys.error(stylize('from ', 'red') + (ctx.filename || '')); + sys.error(stylize(ctx.callLine, 'grey') + ' ' + ctx.callExtract); + } + if (ctx.stack) { sys.error(stylize(ctx.stack, 'red')) } + } +}; + +['color', 'directive', 'operation', 'dimension', + 'keyword', 'variable', 'ruleset', 'element', + 'selector', 'quoted', 'expression', 'rule', + 'call', 'url', 'alpha', 'import', + 'mixin', 'comment', 'anonymous', 'value', 'javascript' +].forEach(function (n) { + require(path.join('less', 'tree', n)); +}); + +less.Parser.importer = function (file, paths, callback) { + var pathname; + + paths.unshift('.'); + + for (var i = 0; i < paths.length; i++) { + try { + pathname = path.join(paths[i], file); + fs.statSync(pathname); + break; + } catch (e) { + pathname = null; + } + } + + if (pathname) { + fs.readFile(pathname, 'utf-8', function(e, data) { + if (e) sys.error(e); + + new(less.Parser)({ + paths: [path.dirname(pathname)].concat(paths), + filename: 
pathname + }).parse(data, function (e, root) { + if (e) less.writeError(e); + callback(root); + }); + }); + } else { + sys.error("file '" + file + "' wasn't found.\n"); + process.exit(1); + } +} + +require('less/functions'); + +for (var k in less) { exports[k] = less[k] } + +// Stylize a string +function stylize(str, style) { + var styles = { + 'bold' : [1, 22], + 'inverse' : [7, 27], + 'underline' : [4, 24], + 'yellow' : [33, 39], + 'green' : [32, 39], + 'red' : [31, 39], + 'grey' : [90, 39] + }; + return '\033[' + styles[style][0] + 'm' + str + + '\033[' + styles[style][1] + 'm'; +} +less.stylize = stylize; + diff --git a/src/dashboard/src/media/vendor/less.js/lib/less/parser.js b/src/dashboard/src/media/vendor/less.js/lib/less/parser.js new file mode 100644 index 0000000000..1e04645d7a --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/lib/less/parser.js @@ -0,0 +1,1115 @@ +var less, tree; + +if (typeof environment === "object" && ({}).toString.call(environment) === "[object Environment]") { + // Rhino + // Details on how to detect Rhino: https://github.com/ringo/ringojs/issues/88 + less = {}; + tree = less.tree = {}; + less.mode = 'rhino'; +} else if (typeof(window) === 'undefined') { + // Node.js + less = exports, + tree = require('less/tree'); + less.mode = 'rhino'; +} else { + // Browser + if (typeof(window.less) === 'undefined') { window.less = {} } + less = window.less, + tree = window.less.tree = {}; + less.mode = 'browser'; +} +// +// less.js - parser +// +// A relatively straight-forward predictive parser. +// There is no tokenization/lexing stage, the input is parsed +// in one sweep. +// +// To make the parser fast enough to run in the browser, several +// optimization had to be made: +// +// - Matching and slicing on a huge input is often cause of slowdowns. +// The solution is to chunkify the input into smaller strings. 
+// The chunks are stored in the `chunks` var, +// `j` holds the current chunk index, and `current` holds +// the index of the current chunk in relation to `input`. +// This gives us an almost 4x speed-up. +// +// - In many cases, we don't need to match individual tokens; +// for example, if a value doesn't hold any variables, operations +// or dynamic references, the parser can effectively 'skip' it, +// treating it as a literal. +// An example would be '1px solid #000' - which evaluates to itself, +// we don't need to know what the individual components are. +// The drawback, of course is that you don't get the benefits of +// syntax-checking on the CSS. This gives us a 50% speed-up in the parser, +// and a smaller speed-up in the code-gen. +// +// +// Token matching is done with the `$` function, which either takes +// a terminal string or regexp, or a non-terminal function to call. +// It also takes care of moving all the indices forwards. +// +// +less.Parser = function Parser(env) { + var input, // LeSS input string + i, // current index in `input` + j, // current chunk + temp, // temporarily holds a chunk's state, for backtracking + memo, // temporarily holds `i`, when backtracking + furthest, // furthest index the parser has gone to + chunks, // chunkified input + current, // index of current chunk, in `input` + parser; + + var that = this; + + // This function is called after all files + // have been imported through `@import`. 
+ var finish = function () {}; + + var imports = this.imports = { + paths: env && env.paths || [], // Search paths, when importing + queue: [], // Files which haven't been imported yet + files: {}, // Holds the imported parse trees + mime: env && env.mime, // MIME type of .less files + push: function (path, callback) { + var that = this; + this.queue.push(path); + + // + // Import a file asynchronously + // + less.Parser.importer(path, this.paths, function (root) { + that.queue.splice(that.queue.indexOf(path), 1); // Remove the path from the queue + that.files[path] = root; // Store the root + + callback(root); + + if (that.queue.length === 0) { finish() } // Call `finish` if we're done importing + }, env); + } + }; + + function save() { temp = chunks[j], memo = i, current = i } + function restore() { chunks[j] = temp, i = memo, current = i } + + function sync() { + if (i > current) { + chunks[j] = chunks[j].slice(i - current); + current = i; + } + } + // + // Parse from a token, regexp or string, and move forward if match + // + function $(tok) { + var match, args, length, c, index, endIndex, k, mem; + + // + // Non-terminal + // + if (tok instanceof Function) { + return tok.call(parser.parsers); + // + // Terminal + // + // Either match a single character in the input, + // or match a regexp in the current chunk (chunk[j]). + // + } else if (typeof(tok) === 'string') { + match = input.charAt(i) === tok ? tok : null; + length = 1; + sync (); + } else { + sync (); + + if (match = tok.exec(chunks[j])) { + length = match[0].length; + } else { + return null; + } + } + + // The match is confirmed, add the match length to `i`, + // and consume any extra white-space characters (' ' || '\n') + // which come after that. The reason for this is that LeSS's + // grammar is mostly white-space insensitive. + // + if (match) { + mem = i += length; + endIndex = i + chunks[j].length - length; + + while (i < endIndex) { + c = input.charCodeAt(i); + if (! 
(c === 32 || c === 10 || c === 9)) { break } + i++; + } + chunks[j] = chunks[j].slice(length + (i - mem)); + current = i; + + if (chunks[j].length === 0 && j < chunks.length - 1) { j++ } + + if(typeof(match) === 'string') { + return match; + } else { + return match.length === 1 ? match[0] : match; + } + } + } + + // Same as $(), but don't change the state of the parser, + // just return the match. + function peek(tok) { + if (typeof(tok) === 'string') { + return input.charAt(i) === tok; + } else { + if (tok.test(chunks[j])) { + return true; + } else { + return false; + } + } + } + + this.env = env = env || {}; + + // The optimization level dictates the thoroughness of the parser, + // the lower the number, the less nodes it will create in the tree. + // This could matter for debugging, or if you want to access + // the individual nodes in the tree. + this.optimization = ('optimization' in this.env) ? this.env.optimization : 1; + + this.env.filename = this.env.filename || null; + + // + // The Parser + // + return parser = { + + imports: imports, + // + // Parse an input string into an abstract syntax tree, + // call `callback` when done. + // + parse: function (str, callback) { + var root, start, end, zone, line, lines, buff = [], c, error = null; + + i = j = current = furthest = 0; + chunks = []; + input = str.replace(/\r\n/g, '\n'); + + // Split the input into chunks. 
+ chunks = (function (chunks) { + var j = 0, + skip = /[^"'`\{\}\/\(\)]+/g, + comment = /\/\*(?:[^*]|\*+[^\/*])*\*+\/|\/\/.*/g, + level = 0, + match, + chunk = chunks[0], + inParam, + inString; + + for (var i = 0, c, cc; i < input.length; i++) { + skip.lastIndex = i; + if (match = skip.exec(input)) { + if (match.index === i) { + i += match[0].length; + chunk.push(match[0]); + } + } + c = input.charAt(i); + comment.lastIndex = i; + + if (!inString && !inParam && c === '/') { + cc = input.charAt(i + 1); + if (cc === '/' || cc === '*') { + if (match = comment.exec(input)) { + if (match.index === i) { + i += match[0].length; + chunk.push(match[0]); + c = input.charAt(i); + } + } + } + } + + if (c === '{' && !inString && !inParam) { level ++; + chunk.push(c); + } else if (c === '}' && !inString && !inParam) { level --; + chunk.push(c); + chunks[++j] = chunk = []; + } else if (c === '(' && !inString && !inParam) { + chunk.push(c); + inParam = true; + } else if (c === ')' && !inString && inParam) { + chunk.push(c); + inParam = false; + } else { + if (c === '"' || c === "'" || c === '`') { + if (! inString) { + inString = c; + } else { + inString = inString === c ? false : inString; + } + } + chunk.push(c); + } + } + if (level > 0) { + throw { + type: 'Syntax', + message: "Missing closing `}`", + filename: env.filename + }; + } + + return chunks.map(function (c) { return c.join('') });; + })([[]]); + + // Start with the primary rule. + // The whole syntax tree is held under a Ruleset node, + // with the `root` property set to true, so no `{}` are + // output. The callback is called when the input is parsed. 
+ root = new(tree.Ruleset)([], $(this.parsers.primary)); + root.root = true; + + root.toCSS = (function (evaluate) { + var line, lines, column; + + return function (options, variables) { + var frames = []; + + options = options || {}; + // + // Allows setting variables with a hash, so: + // + // `{ color: new(tree.Color)('#f01') }` will become: + // + // new(tree.Rule)('@color', + // new(tree.Value)([ + // new(tree.Expression)([ + // new(tree.Color)('#f01') + // ]) + // ]) + // ) + // + if (typeof(variables) === 'object' && !Array.isArray(variables)) { + variables = Object.keys(variables).map(function (k) { + var value = variables[k]; + + if (! (value instanceof tree.Value)) { + if (! (value instanceof tree.Expression)) { + value = new(tree.Expression)([value]); + } + value = new(tree.Value)([value]); + } + return new(tree.Rule)('@' + k, value, false, 0); + }); + frames = [new(tree.Ruleset)(null, variables)]; + } + + try { + var css = evaluate.call(this, { frames: frames }) + .toCSS([], { compress: options.compress || false }); + } catch (e) { + lines = input.split('\n'); + line = getLine(e.index); + + for (var n = e.index, column = -1; + n >= 0 && input.charAt(n) !== '\n'; + n--) { column++ } + + throw { + type: e.type, + message: e.message, + filename: env.filename, + index: e.index, + line: typeof(line) === 'number' ? line + 1 : null, + callLine: e.call && (getLine(e.call) + 1), + callExtract: lines[getLine(e.call)], + stack: e.stack, + column: column, + extract: [ + lines[line - 1], + lines[line], + lines[line + 1] + ] + }; + } + if (options.compress) { + return css.replace(/(\s)+/g, "$1"); + } else { + return css; + } + + function getLine(index) { + return index ? (input.slice(0, index).match(/\n/g) || "").length : null; + } + }; + })(root.eval); + + // If `i` is smaller than the `input.length - 1`, + // it means the parser wasn't able to parse the whole + // string, so we've got a parsing error. 
+ // + // We try to extract a \n delimited string, + // showing the line where the parse error occured. + // We split it up into two parts (the part which parsed, + // and the part which didn't), so we can color them differently. + if (i < input.length - 1) { + i = furthest; + lines = input.split('\n'); + line = (input.slice(0, i).match(/\n/g) || "").length + 1; + + for (var n = i, column = -1; n >= 0 && input.charAt(n) !== '\n'; n--) { column++ } + + error = { + name: "ParseError", + message: "Syntax Error on line " + line, + index: i, + filename: env.filename, + line: line, + column: column, + extract: [ + lines[line - 2], + lines[line - 1], + lines[line] + ] + }; + } + + if (this.imports.queue.length > 0) { + finish = function () { callback(error, root) }; + } else { + callback(error, root); + } + }, + + // + // Here in, the parsing rules/functions + // + // The basic structure of the syntax tree generated is as follows: + // + // Ruleset -> Rule -> Value -> Expression -> Entity + // + // Here's some LESS code: + // + // .class { + // color: #fff; + // border: 1px solid #000; + // width: @w + 4px; + // > .child {...} + // } + // + // And here's what the parse tree might look like: + // + // Ruleset (Selector '.class', [ + // Rule ("color", Value ([Expression [Color #fff]])) + // Rule ("border", Value ([Expression [Dimension 1px][Keyword "solid"][Color #000]])) + // Rule ("width", Value ([Expression [Operation "+" [Variable "@w"][Dimension 4px]]])) + // Ruleset (Selector [Element '>', '.child'], [...]) + // ]) + // + // In general, most rules will try to parse a token with the `$()` function, and if the return + // value is truly, will return a new node, of the relevant type. Sometimes, we need to check + // first, before parsing, that's when we use `peek()`. + // + parsers: { + // + // The `primary` rule is the *entry* and *exit* point of the parser. + // The rules here can appear at any level of the parse tree. 
+ // + // The recursive nature of the grammar is an interplay between the `block` + // rule, which represents `{ ... }`, the `ruleset` rule, and this `primary` rule, + // as represented by this simplified grammar: + // + // primary → (ruleset | rule)+ + // ruleset → selector+ block + // block → '{' primary '}' + // + // Only at one point is the primary rule not called from the + // block rule: at the root level. + // + primary: function () { + var node, root = []; + + while ((node = $(this.mixin.definition) || $(this.rule) || $(this.ruleset) || + $(this.mixin.call) || $(this.comment) || $(this.directive)) + || $(/^[\s\n]+/)) { + node && root.push(node); + } + return root; + }, + + // We create a Comment node for CSS comments `/* */`, + // but keep the LeSS comments `//` silent, by just skipping + // over them. + comment: function () { + var comment; + + if (input.charAt(i) !== '/') return; + + if (input.charAt(i + 1) === '/') { + return new(tree.Comment)($(/^\/\/.*/), true); + } else if (comment = $(/^\/\*(?:[^*]|\*+[^\/*])*\*+\/\n?/)) { + return new(tree.Comment)(comment); + } + }, + + // + // Entities are tokens which can be found inside an Expression + // + entities: { + // + // A string, which supports escaping " and ' + // + // "milky way" 'he\'s the one!' + // + quoted: function () { + var str, j = i, e; + + if (input.charAt(j) === '~') { j++, e = true } // Escaped strings + if (input.charAt(j) !== '"' && input.charAt(j) !== "'") return; + + e && $('~'); + + if (str = $(/^"((?:[^"\\\r\n]|\\.)*)"|'((?:[^'\\\r\n]|\\.)*)'/)) { + return new(tree.Quoted)(str[0], str[1] || str[2], e); + } + }, + + // + // A catch-all word, such as: + // + // black border-collapse + // + keyword: function () { + var k; + if (k = $(/^[_A-Za-z-][_A-Za-z0-9-]*/)) { return new(tree.Keyword)(k) } + }, + + // + // A function call + // + // rgb(255, 0, 255) + // + // We also try to catch IE's `alpha()`, but let the `alpha` parser + // deal with the details. 
+ // + // The arguments are parsed with the `entities.arguments` parser. + // + call: function () { + var name, args, index = i; + + if (! (name = /^([\w-]+|%)\(/.exec(chunks[j]))) return; + + name = name[1].toLowerCase(); + + if (name === 'url') { return null } + else { i += name.length } + + if (name === 'alpha') { return $(this.alpha) } + + $('('); // Parse the '(' and consume whitespace. + + args = $(this.entities.arguments); + + if (! $(')')) return; + + if (name) { return new(tree.Call)(name, args, index) } + }, + arguments: function () { + var args = [], arg; + + while (arg = $(this.expression)) { + args.push(arg); + if (! $(',')) { break } + } + return args; + }, + literal: function () { + return $(this.entities.dimension) || + $(this.entities.color) || + $(this.entities.quoted); + }, + + // + // Parse url() tokens + // + // We use a specific rule for urls, because they don't really behave like + // standard function calls. The difference is that the argument doesn't have + // to be enclosed within a string, so it can't be parsed as an Expression. + // + url: function () { + var value; + + if (input.charAt(i) !== 'u' || !$(/^url\(/)) return; + value = $(this.entities.quoted) || $(this.entities.variable) || + $(this.entities.dataURI) || $(/^[-\w%@$\/.&=:;#+?~]+/) || ""; + if (! $(')')) throw new(Error)("missing closing ) for url()"); + + return new(tree.URL)((value.value || value.data || value instanceof tree.Variable) + ? value : new(tree.Anonymous)(value), imports.paths); + }, + + dataURI: function () { + var obj; + + if ($(/^data:/)) { + obj = {}; + obj.mime = $(/^[^\/]+\/[^,;)]+/) || ''; + obj.charset = $(/^;\s*charset=[^,;)]+/) || ''; + obj.base64 = $(/^;\s*base64/) || ''; + obj.data = $(/^,\s*[^)]+/); + + if (obj.data) { return obj } + } + }, + + // + // A Variable entity, such as `@fink`, in + // + // width: @fink + 2px + // + // We use a different parser for variable definitions, + // see `parsers.variable`. 
+ // + variable: function () { + var name, index = i; + + if (input.charAt(i) === '@' && (name = $(/^@@?[\w-]+/))) { + return new(tree.Variable)(name, index); + } + }, + + // + // A Hexadecimal color + // + // #4F3C2F + // + // `rgb` and `hsl` colors are parsed through the `entities.call` parser. + // + color: function () { + var rgb; + + if (input.charAt(i) === '#' && (rgb = $(/^#([a-fA-F0-9]{6}|[a-fA-F0-9]{3})/))) { + return new(tree.Color)(rgb[1]); + } + }, + + // + // A Dimension, that is, a number and a unit + // + // 0.5em 95% + // + dimension: function () { + var value, c = input.charCodeAt(i); + if ((c > 57 || c < 45) || c === 47) return; + + if (value = $(/^(-?\d*\.?\d+)(px|%|em|pc|ex|in|deg|s|ms|pt|cm|mm|rad|grad|turn)?/)) { + return new(tree.Dimension)(value[1], value[2]); + } + }, + + // + // JavaScript code to be evaluated + // + // `window.location.href` + // + javascript: function () { + var str, j = i, e; + + if (input.charAt(j) === '~') { j++, e = true } // Escaped strings + if (input.charAt(j) !== '`') { return } + + e && $('~'); + + if (str = $(/^`([^`]*)`/)) { + return new(tree.JavaScript)(str[1], i, e); + } + } + }, + + // + // The variable part of a variable definition. Used in the `rule` parser + // + // @fink: + // + variable: function () { + var name; + + if (input.charAt(i) === '@' && (name = $(/^(@[\w-]+)\s*:/))) { return name[1] } + }, + + // + // A font size/line-height shorthand + // + // small/12px + // + // We need to peek first, or we'll match on keywords and dimensions + // + shorthand: function () { + var a, b; + + if (! 
peek(/^[@\w.%-]+\/[@\w.-]+/)) return; + + if ((a = $(this.entity)) && $('/') && (b = $(this.entity))) { + return new(tree.Shorthand)(a, b); + } + }, + + // + // Mixins + // + mixin: { + // + // A Mixin call, with an optional argument list + // + // #mixins > .square(#fff); + // .rounded(4px, black); + // .button; + // + // The `while` loop is there because mixins can be + // namespaced, but we only support the child and descendant + // selector for now. + // + call: function () { + var elements = [], e, c, args, index = i, s = input.charAt(i); + + if (s !== '.' && s !== '#') { return } + + while (e = $(/^[#.](?:[\w-]|\\(?:[a-fA-F0-9]{1,6} ?|[^a-fA-F0-9]))+/)) { + elements.push(new(tree.Element)(c, e)); + c = $('>'); + } + $('(') && (args = $(this.entities.arguments)) && $(')'); + + if (elements.length > 0 && ($(';') || peek('}'))) { + return new(tree.mixin.Call)(elements, args, index); + } + }, + + // + // A Mixin definition, with a list of parameters + // + // .rounded (@radius: 2px, @color) { + // ... + // } + // + // Until we have a finer grained state-machine, we have to + // do a look-ahead, to make sure we don't have a mixin call. + // See the `rule` function for more information. + // + // We start by matching `.rounded (`, and then proceed on to + // the argument list, which has optional default values. + // We store the parameters in `params`, with a `value` key, + // if there is a value, such as in the case of `@radius`. + // + // Once we've got our params list, and a closing `)`, we parse + // the `{...}` block. + // + definition: function () { + var name, params = [], match, ruleset, param, value; + + if ((input.charAt(i) !== '.' 
&& input.charAt(i) !== '#') || + peek(/^[^{]*(;|})/)) return; + + if (match = $(/^([#.](?:[\w-]|\\(?:[a-fA-F0-9]{1,6} ?|[^a-fA-F0-9]))+)\s*\(/)) { + name = match[1]; + + while (param = $(this.entities.variable) || $(this.entities.literal) + || $(this.entities.keyword)) { + // Variable + if (param instanceof tree.Variable) { + if ($(':')) { + if (value = $(this.expression)) { + params.push({ name: param.name, value: value }); + } else { + throw new(Error)("Expected value"); + } + } else { + params.push({ name: param.name }); + } + } else { + params.push({ value: param }); + } + if (! $(',')) { break } + } + if (! $(')')) throw new(Error)("Expected )"); + + ruleset = $(this.block); + + if (ruleset) { + return new(tree.mixin.Definition)(name, params, ruleset); + } + } + } + }, + + // + // Entities are the smallest recognized token, + // and can be found inside a rule's value. + // + entity: function () { + return $(this.entities.literal) || $(this.entities.variable) || $(this.entities.url) || + $(this.entities.call) || $(this.entities.keyword) || $(this.entities.javascript) || + $(this.comment); + }, + + // + // A Rule terminator. Note that we use `peek()` to check for '}', + // because the `block` rule will be expecting it, but we still need to make sure + // it's there, if ';' was ommitted. + // + end: function () { + return $(';') || peek('}'); + }, + + // + // IE's alpha function + // + // alpha(opacity=88) + // + alpha: function () { + var value; + + if (! $(/^\(opacity=/i)) return; + if (value = $(/^\d+/) || $(this.entities.variable)) { + if (! $(')')) throw new(Error)("missing closing ) for alpha()"); + return new(tree.Alpha)(value); + } + }, + + // + // A Selector Element + // + // div + // + h1 + // #socks + // input[type="text"] + // + // Elements are the building blocks for Selectors, + // they are made out of a `Combinator` (see combinator rule), + // and an element name, such as a tag a class, or `*`. 
+ // + element: function () { + var e, t, c; + + c = $(this.combinator); + e = $(/^(?:[.#]?|:*)(?:[\w-]|\\(?:[a-fA-F0-9]{1,6} ?|[^a-fA-F0-9]))+/) || $('*') || $(this.attribute) || $(/^\([^)@]+\)/) || $(/^(?:\d*\.)?\d+%/); + + if (e) { return new(tree.Element)(c, e) } + + if (c.value && c.value[0] === '&') { + return new(tree.Element)(c, null); + } + }, + + // + // Combinators combine elements together, in a Selector. + // + // Because our parser isn't white-space sensitive, special care + // has to be taken, when parsing the descendant combinator, ` `, + // as it's an empty space. We have to check the previous character + // in the input, to see if it's a ` ` character. More info on how + // we deal with this in *combinator.js*. + // + combinator: function () { + var match, c = input.charAt(i); + + if (c === '>' || c === '+' || c === '~') { + i++; + while (input.charAt(i) === ' ') { i++ } + return new(tree.Combinator)(c); + } else if (c === '&') { + match = '&'; + i++; + if(input.charAt(i) === ' ') { + match = '& '; + } + while (input.charAt(i) === ' ') { i++ } + return new(tree.Combinator)(match); + } else if (c === ':' && input.charAt(i + 1) === ':') { + i += 2; + while (input.charAt(i) === ' ') { i++ } + return new(tree.Combinator)('::'); + } else if (input.charAt(i - 1) === ' ') { + return new(tree.Combinator)(" "); + } else { + return new(tree.Combinator)(null); + } + }, + + // + // A CSS Selector + // + // .class > div + h1 + // li a:hover + // + // Selectors are made out of one or more Elements, see above. + // + selector: function () { + var sel, e, elements = [], c, match; + + while (e = $(this.element)) { + c = input.charAt(i); + elements.push(e) + if (c === '{' || c === '}' || c === ';' || c === ',') { break } + } + + if (elements.length > 0) { return new(tree.Selector)(elements) } + }, + tag: function () { + return $(/^[a-zA-Z][a-zA-Z-]*[0-9]?/) || $('*'); + }, + attribute: function () { + var attr = '', key, val, op; + + if (! 
$('[')) return; + + if (key = $(/^[a-zA-Z-]+/) || $(this.entities.quoted)) { + if ((op = $(/^[|~*$^]?=/)) && + (val = $(this.entities.quoted) || $(/^[\w-]+/))) { + attr = [key, op, val.toCSS ? val.toCSS() : val].join(''); + } else { attr = key } + } + + if (! $(']')) return; + + if (attr) { return "[" + attr + "]" } + }, + + // + // The `block` rule is used by `ruleset` and `mixin.definition`. + // It's a wrapper around the `primary` rule, with added `{}`. + // + block: function () { + var content; + + if ($('{') && (content = $(this.primary)) && $('}')) { + return content; + } + }, + + // + // div, .class, body > p {...} + // + ruleset: function () { + var selectors = [], s, rules, match; + save(); + + while (s = $(this.selector)) { + selectors.push(s); + $(this.comment); + if (! $(',')) { break } + $(this.comment); + } + + if (selectors.length > 0 && (rules = $(this.block))) { + return new(tree.Ruleset)(selectors, rules); + } else { + // Backtrack + furthest = i; + restore(); + } + }, + rule: function () { + var name, value, c = input.charAt(i), important, match; + save(); + + if (c === '.' || c === '#' || c === '&') { return } + + if (name = $(this.variable) || $(this.property)) { + if ((name.charAt(0) != '@') && (match = /^([^@+\/'"*`(;{}-]*);/.exec(chunks[j]))) { + i += match[0].length - 1; + value = new(tree.Anonymous)(match[1]); + } else if (name === "font") { + value = $(this.font); + } else { + value = $(this.value); + } + important = $(this.important); + + if (value && $(this.end)) { + return new(tree.Rule)(name, value, important, memo); + } else { + furthest = i; + restore(); + } + } + }, + + // + // An @import directive + // + // @import "lib"; + // + // Depending on our environemnt, importing is done differently: + // In the browser, it's an XHR request, in Node, it would be a + // file-system operation. The function used for importing is + // stored in `import`, which we pass to the Import constructor. 
+ // + "import": function () { + var path; + if ($(/^@import\s+/) && + (path = $(this.entities.quoted) || $(this.entities.url)) && + $(';')) { + return new(tree.Import)(path, imports); + } + }, + + // + // A CSS Directive + // + // @charset "utf-8"; + // + directive: function () { + var name, value, rules, types; + + if (input.charAt(i) !== '@') return; + + if (value = $(this['import'])) { + return value; + } else if (name = $(/^@media|@page/) || $(/^@(?:-webkit-|-moz-)?keyframes/)) { + types = ($(/^[^{]+/) || '').trim(); + if (rules = $(this.block)) { + return new(tree.Directive)(name + " " + types, rules); + } + } else if (name = $(/^@[-a-z]+/)) { + if (name === '@font-face') { + if (rules = $(this.block)) { + return new(tree.Directive)(name, rules); + } + } else if ((value = $(this.entity)) && $(';')) { + return new(tree.Directive)(name, value); + } + } + }, + font: function () { + var value = [], expression = [], weight, shorthand, font, e; + + while (e = $(this.shorthand) || $(this.entity)) { + expression.push(e); + } + value.push(new(tree.Expression)(expression)); + + if ($(',')) { + while (e = $(this.expression)) { + value.push(e); + if (! $(',')) { break } + } + } + return new(tree.Value)(value); + }, + + // + // A Value is a comma-delimited list of Expressions + // + // font-family: Baskerville, Georgia, serif; + // + // In a Rule, a Value represents everything after the `:`, + // and before the `;`. + // + value: function () { + var e, expressions = [], important; + + while (e = $(this.expression)) { + expressions.push(e); + if (! $(',')) { break } + } + + if (expressions.length > 0) { + return new(tree.Value)(expressions); + } + }, + important: function () { + if (input.charAt(i) === '!') { + return $(/^! 
*important/); + } + }, + sub: function () { + var e; + + if ($('(') && (e = $(this.expression)) && $(')')) { + return e; + } + }, + multiplication: function () { + var m, a, op, operation; + if (m = $(this.operand)) { + while ((op = ($('/') || $('*'))) && (a = $(this.operand))) { + operation = new(tree.Operation)(op, [operation || m, a]); + } + return operation || m; + } + }, + addition: function () { + var m, a, op, operation; + if (m = $(this.multiplication)) { + while ((op = $(/^[-+]\s+/) || (input.charAt(i - 1) != ' ' && ($('+') || $('-')))) && + (a = $(this.multiplication))) { + operation = new(tree.Operation)(op, [operation || m, a]); + } + return operation || m; + } + }, + + // + // An operand is anything that can be part of an operation, + // such as a Color, or a Variable + // + operand: function () { + var negate, p = input.charAt(i + 1); + + if (input.charAt(i) === '-' && (p === '@' || p === '(')) { negate = $('-') } + var o = $(this.sub) || $(this.entities.dimension) || + $(this.entities.color) || $(this.entities.variable) || + $(this.entities.call); + return negate ? new(tree.Operation)('*', [new(tree.Dimension)(-1), o]) + : o; + }, + + // + // Expressions either represent mathematical operations, + // or white-space delimited Entities. + // + // 1px solid black + // @var * 2 + // + expression: function () { + var e, delim, entities = [], d; + + while (e = $(this.addition) || $(this.entity)) { + entities.push(e); + } + if (entities.length > 0) { + return new(tree.Expression)(entities); + } + }, + property: function () { + var name; + + if (name = $(/^(\*?-?[-a-z_0-9]+)\s*:/)) { + return name[1]; + } + } + } + }; +}; + +if (less.mode === 'browser' || less.mode === 'rhino') { + // + // Used by `@import` directives + // + less.Parser.importer = function (path, paths, callback, env) { + if (path.charAt(0) !== '/' && paths.length > 0) { + path = paths[0] + path; + } + // We pass `true` as 3rd argument, to force the reload of the import. 
+ // This is so we can get the syntax tree as opposed to just the CSS output, + // as we need this to evaluate the current stylesheet. + loadStyleSheet({ href: path, title: path, type: env.mime }, callback, true); + }; +} + diff --git a/src/dashboard/src/media/vendor/less.js/lib/less/rhino.js b/src/dashboard/src/media/vendor/less.js/lib/less/rhino.js new file mode 100644 index 0000000000..ab1c886821 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/lib/less/rhino.js @@ -0,0 +1,60 @@ +var name; + +function loadStyleSheet(sheet, callback, reload, remaining) { + var sheetName = name.slice(0, name.lastIndexOf('/') + 1) + sheet.href; + var input = readFile(sheetName); + var parser = new less.Parser(); + parser.parse(input, function (e, root) { + if (e) { + print("Error: " + e); + quit(1); + } + callback(root, sheet, { local: false, lastModified: 0, remaining: remaining }); + }); + + // callback({}, sheet, { local: true, remaining: remaining }); +} + +function writeFile(filename, content) { + var fstream = new java.io.FileWriter(filename); + var out = new java.io.BufferedWriter(fstream); + out.write(content); + out.close(); +} + +// Command line integration via Rhino +(function (args) { + name = args[0]; + var output = args[1]; + + if (!name) { + print('No files present in the fileset; Check your pattern match in build.xml'); + quit(1); + } + path = name.split("/");path.pop();path=path.join("/") + + var input = readFile(name); + + if (!input) { + print('lesscss: couldn\'t open file ' + name); + quit(1); + } + + var result; + var parser = new less.Parser(); + parser.parse(input, function (e, root) { + if (e) { + quit(1); + } else { + result = root.toCSS(); + if (output) { + writeFile(output, result); + print("Written to " + output); + } else { + print(result); + } + quit(0); + } + }); + print("done"); +}(arguments)); diff --git a/src/dashboard/src/media/vendor/less.js/lib/less/tree.js b/src/dashboard/src/media/vendor/less.js/lib/less/tree.js new file mode 100644 
index 0000000000..eb08aa4ffb --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/lib/less/tree.js @@ -0,0 +1,13 @@ +require('less/tree').find = function (obj, fun) { + for (var i = 0, r; i < obj.length; i++) { + if (r = fun.call(obj, obj[i])) { return r } + } + return null; +}; +require('less/tree').jsify = function (obj) { + if (Array.isArray(obj.value) && (obj.value.length > 1)) { + return '[' + obj.value.map(function (v) { return v.toCSS(false) }).join(', ') + ']'; + } else { + return obj.toCSS(false); + } +}; diff --git a/src/dashboard/src/media/vendor/less.js/lib/less/tree/alpha.js b/src/dashboard/src/media/vendor/less.js/lib/less/tree/alpha.js new file mode 100644 index 0000000000..551ccba6bf --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/lib/less/tree/alpha.js @@ -0,0 +1,17 @@ +(function (tree) { + +tree.Alpha = function (val) { + this.value = val; +}; +tree.Alpha.prototype = { + toCSS: function () { + return "alpha(opacity=" + + (this.value.toCSS ? this.value.toCSS() : this.value) + ")"; + }, + eval: function (env) { + if (this.value.eval) { this.value = this.value.eval(env) } + return this; + } +}; + +})(require('less/tree')); diff --git a/src/dashboard/src/media/vendor/less.js/lib/less/tree/anonymous.js b/src/dashboard/src/media/vendor/less.js/lib/less/tree/anonymous.js new file mode 100644 index 0000000000..89840d0df0 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/lib/less/tree/anonymous.js @@ -0,0 +1,13 @@ +(function (tree) { + +tree.Anonymous = function (string) { + this.value = string.value || string; +}; +tree.Anonymous.prototype = { + toCSS: function () { + return this.value; + }, + eval: function () { return this } +}; + +})(require('less/tree')); diff --git a/src/dashboard/src/media/vendor/less.js/lib/less/tree/call.js b/src/dashboard/src/media/vendor/less.js/lib/less/tree/call.js new file mode 100644 index 0000000000..4a72932bf9 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/lib/less/tree/call.js @@ -0,0 
+1,45 @@ +(function (tree) { + +// +// A function call node. +// +tree.Call = function (name, args, index) { + this.name = name; + this.args = args; + this.index = index; +}; +tree.Call.prototype = { + // + // When evaluating a function call, + // we either find the function in `tree.functions` [1], + // in which case we call it, passing the evaluated arguments, + // or we simply print it out as it appeared originally [2]. + // + // The *functions.js* file contains the built-in functions. + // + // The reason why we evaluate the arguments, is in the case where + // we try to pass a variable to a function, like: `saturate(@color)`. + // The function should receive the value, not the variable. + // + eval: function (env) { + var args = this.args.map(function (a) { return a.eval(env) }); + + if (this.name in tree.functions) { // 1. + try { + return tree.functions[this.name].apply(tree.functions, args); + } catch (e) { + throw { message: "error evaluating function `" + this.name + "`", + index: this.index }; + } + } else { // 2. + return new(tree.Anonymous)(this.name + + "(" + args.map(function (a) { return a.toCSS() }).join(', ') + ")"); + } + }, + + toCSS: function (env) { + return this.eval(env).toCSS(); + } +}; + +})(require('less/tree')); diff --git a/src/dashboard/src/media/vendor/less.js/lib/less/tree/color.js b/src/dashboard/src/media/vendor/less.js/lib/less/tree/color.js new file mode 100644 index 0000000000..bb7646a5c9 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/lib/less/tree/color.js @@ -0,0 +1,101 @@ +(function (tree) { +// +// RGB Colors - #ff0014, #eee +// +tree.Color = function (rgb, a) { + // + // The end goal here, is to parse the arguments + // into an integer triplet, such as `128, 255, 0` + // + // This facilitates operations and conversions. 
+ // + if (Array.isArray(rgb)) { + this.rgb = rgb; + } else if (rgb.length == 6) { + this.rgb = rgb.match(/.{2}/g).map(function (c) { + return parseInt(c, 16); + }); + } else { + this.rgb = rgb.split('').map(function (c) { + return parseInt(c + c, 16); + }); + } + this.alpha = typeof(a) === 'number' ? a : 1; +}; +tree.Color.prototype = { + eval: function () { return this }, + + // + // If we have some transparency, the only way to represent it + // is via `rgba`. Otherwise, we use the hex representation, + // which has better compatibility with older browsers. + // Values are capped between `0` and `255`, rounded and zero-padded. + // + toCSS: function () { + if (this.alpha < 1.0) { + return "rgba(" + this.rgb.map(function (c) { + return Math.round(c); + }).concat(this.alpha).join(', ') + ")"; + } else { + return '#' + this.rgb.map(function (i) { + i = Math.round(i); + i = (i > 255 ? 255 : (i < 0 ? 0 : i)).toString(16); + return i.length === 1 ? '0' + i : i; + }).join(''); + } + }, + + // + // Operations have to be done per-channel, if not, + // channels will spill onto each other. Once we have + // our result, in the form of an integer triplet, + // we create a new Color node to hold the result. + // + operate: function (op, other) { + var result = []; + + if (! (other instanceof tree.Color)) { + other = other.toColor(); + } + + for (var c = 0; c < 3; c++) { + result[c] = tree.operate(op, this.rgb[c], other.rgb[c]); + } + return new(tree.Color)(result, this.alpha + other.alpha); + }, + + toHSL: function () { + var r = this.rgb[0] / 255, + g = this.rgb[1] / 255, + b = this.rgb[2] / 255, + a = this.alpha; + + var max = Math.max(r, g, b), min = Math.min(r, g, b); + var h, s, l = (max + min) / 2, d = max - min; + + if (max === min) { + h = s = 0; + } else { + s = l > 0.5 ? d / (2 - max - min) : d / (max + min); + + switch (max) { + case r: h = (g - b) / d + (g < b ? 
6 : 0); break; + case g: h = (b - r) / d + 2; break; + case b: h = (r - g) / d + 4; break; + } + h /= 6; + } + return { h: h * 360, s: s, l: l, a: a }; + }, + toARGB: function () { + var argb = [Math.round(this.alpha * 255)].concat(this.rgb); + return '#' + argb.map(function (i) { + i = Math.round(i); + i = (i > 255 ? 255 : (i < 0 ? 0 : i)).toString(16); + return i.length === 1 ? '0' + i : i; + }).join(''); + } +}; + + +})(require('less/tree')); diff --git a/src/dashboard/src/media/vendor/less.js/lib/less/tree/comment.js b/src/dashboard/src/media/vendor/less.js/lib/less/tree/comment.js new file mode 100644 index 0000000000..2d95dff893 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/lib/less/tree/comment.js @@ -0,0 +1,14 @@ +(function (tree) { + +tree.Comment = function (value, silent) { + this.value = value; + this.silent = !!silent; +}; +tree.Comment.prototype = { + toCSS: function (env) { + return env.compress ? '' : this.value; + }, + eval: function () { return this } +}; + +})(require('less/tree')); diff --git a/src/dashboard/src/media/vendor/less.js/lib/less/tree/dimension.js b/src/dashboard/src/media/vendor/less.js/lib/less/tree/dimension.js new file mode 100644 index 0000000000..41f3ca2cbd --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/lib/less/tree/dimension.js @@ -0,0 +1,34 @@ +(function (tree) { + +// +// A number with a unit +// +tree.Dimension = function (value, unit) { + this.value = parseFloat(value); + this.unit = unit || null; +}; + +tree.Dimension.prototype = { + eval: function () { return this }, + toColor: function () { + return new(tree.Color)([this.value, this.value, this.value]); + }, + toCSS: function () { + var css = this.value + this.unit; + return css; + }, + + // In an operation between two Dimensions, + // we default to the first Dimension's unit, + // so `1px + 2em` will yield `3px`. + // In the future, we could implement some unit + // conversions such that `100cm + 10mm` would yield + // `101cm`. 
+ operate: function (op, other) { + return new(tree.Dimension) + (tree.operate(op, this.value, other.value), + this.unit || other.unit); + } +}; + +})(require('less/tree')); diff --git a/src/dashboard/src/media/vendor/less.js/lib/less/tree/directive.js b/src/dashboard/src/media/vendor/less.js/lib/less/tree/directive.js new file mode 100644 index 0000000000..fbe9a93e5e --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/lib/less/tree/directive.js @@ -0,0 +1,33 @@ +(function (tree) { + +tree.Directive = function (name, value) { + this.name = name; + if (Array.isArray(value)) { + this.ruleset = new(tree.Ruleset)([], value); + } else { + this.value = value; + } +}; +tree.Directive.prototype = { + toCSS: function (ctx, env) { + if (this.ruleset) { + this.ruleset.root = true; + return this.name + (env.compress ? '{' : ' {\n ') + + this.ruleset.toCSS(ctx, env).trim().replace(/\n/g, '\n ') + + (env.compress ? '}': '\n}\n'); + } else { + return this.name + ' ' + this.value.toCSS() + ';\n'; + } + }, + eval: function (env) { + env.frames.unshift(this); + this.ruleset = this.ruleset && this.ruleset.eval(env); + env.frames.shift(); + return this; + }, + variable: function (name) { return tree.Ruleset.prototype.variable.call(this.ruleset, name) }, + find: function () { return tree.Ruleset.prototype.find.apply(this.ruleset, arguments) }, + rulesets: function () { return tree.Ruleset.prototype.rulesets.apply(this.ruleset) } +}; + +})(require('less/tree')); diff --git a/src/dashboard/src/media/vendor/less.js/lib/less/tree/element.js b/src/dashboard/src/media/vendor/less.js/lib/less/tree/element.js new file mode 100644 index 0000000000..27cf822857 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/lib/less/tree/element.js @@ -0,0 +1,35 @@ +(function (tree) { + +tree.Element = function (combinator, value) { + this.combinator = combinator instanceof tree.Combinator ? + combinator : new(tree.Combinator)(combinator); + this.value = value ? 
value.trim() : ""; +}; +tree.Element.prototype.toCSS = function (env) { + return this.combinator.toCSS(env || {}) + this.value; +}; + +tree.Combinator = function (value) { + if (value === ' ') { + this.value = ' '; + } else if (value === '& ') { + this.value = '& '; + } else { + this.value = value ? value.trim() : ""; + } +}; +tree.Combinator.prototype.toCSS = function (env) { + return { + '' : '', + ' ' : ' ', + '&' : '', + '& ' : ' ', + ':' : ' :', + '::': '::', + '+' : env.compress ? '+' : ' + ', + '~' : env.compress ? '~' : ' ~ ', + '>' : env.compress ? '>' : ' > ' + }[this.value]; +}; + +})(require('less/tree')); diff --git a/src/dashboard/src/media/vendor/less.js/lib/less/tree/expression.js b/src/dashboard/src/media/vendor/less.js/lib/less/tree/expression.js new file mode 100644 index 0000000000..f638a1be6b --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/lib/less/tree/expression.js @@ -0,0 +1,23 @@ +(function (tree) { + +tree.Expression = function (value) { this.value = value }; +tree.Expression.prototype = { + eval: function (env) { + if (this.value.length > 1) { + return new(tree.Expression)(this.value.map(function (e) { + return e.eval(env); + })); + } else if (this.value.length === 1) { + return this.value[0].eval(env); + } else { + return this; + } + }, + toCSS: function (env) { + return this.value.map(function (e) { + return e.toCSS(env); + }).join(' '); + } +}; + +})(require('less/tree')); diff --git a/src/dashboard/src/media/vendor/less.js/lib/less/tree/import.js b/src/dashboard/src/media/vendor/less.js/lib/less/tree/import.js new file mode 100644 index 0000000000..427c1095ef --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/lib/less/tree/import.js @@ -0,0 +1,77 @@ +(function (tree) { +// +// CSS @import node +// +// The general strategy here is that we don't want to wait +// for the parsing to be completed, before we start importing +// the file. 
That's because in the context of a browser, +// most of the time will be spent waiting for the server to respond. +// +// On creation, we push the import path to our import queue, though +// `import,push`, we also pass it a callback, which it'll call once +// the file has been fetched, and parsed. +// +tree.Import = function (path, imports) { + var that = this; + + this._path = path; + + // The '.less' extension is optional + if (path instanceof tree.Quoted) { + this.path = /\.(le?|c)ss(\?.*)?$/.test(path.value) ? path.value : path.value + '.less'; + } else { + this.path = path.value.value || path.value; + } + + this.css = /css(\?.*)?$/.test(this.path); + + // Only pre-compile .less files + if (! this.css) { + imports.push(this.path, function (root) { + if (! root) { + throw new(Error)("Error parsing " + that.path); + } + that.root = root; + }); + } +}; + +// +// The actual import node doesn't return anything, when converted to CSS. +// The reason is that it's used at the evaluation stage, so that the rules +// it imports can be treated like any other rules. +// +// In `eval`, we make sure all Import nodes get evaluated, recursively, so +// we end up with a flat structure, which can easily be imported in the parent +// ruleset. 
+// +tree.Import.prototype = { + toCSS: function () { + if (this.css) { + return "@import " + this._path.toCSS() + ';\n'; + } else { + return ""; + } + }, + eval: function (env) { + var ruleset; + + if (this.css) { + return this; + } else { + ruleset = new(tree.Ruleset)(null, this.root.rules.slice(0)); + + for (var i = 0; i < ruleset.rules.length; i++) { + if (ruleset.rules[i] instanceof tree.Import) { + Array.prototype + .splice + .apply(ruleset.rules, + [i, 1].concat(ruleset.rules[i].eval(env))); + } + } + return ruleset.rules; + } + } +}; + +})(require('less/tree')); diff --git a/src/dashboard/src/media/vendor/less.js/lib/less/tree/javascript.js b/src/dashboard/src/media/vendor/less.js/lib/less/tree/javascript.js new file mode 100644 index 0000000000..4ec66b9eb2 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/lib/less/tree/javascript.js @@ -0,0 +1,51 @@ +(function (tree) { + +tree.JavaScript = function (string, index, escaped) { + this.escaped = escaped; + this.expression = string; + this.index = index; +}; +tree.JavaScript.prototype = { + eval: function (env) { + var result, + that = this, + context = {}; + + var expression = this.expression.replace(/@\{([\w-]+)\}/g, function (_, name) { + return tree.jsify(new(tree.Variable)('@' + name, that.index).eval(env)); + }); + + try { + expression = new(Function)('return (' + expression + ')'); + } catch (e) { + throw { message: "JavaScript evaluation error: `" + expression + "`" , + index: this.index }; + } + + for (var k in env.frames[0].variables()) { + context[k.slice(1)] = { + value: env.frames[0].variables()[k].value, + toJS: function () { + return this.value.eval(env).toCSS(); + } + }; + } + + try { + result = expression.call(context); + } catch (e) { + throw { message: "JavaScript evaluation error: '" + e.name + ': ' + e.message + "'" , + index: this.index }; + } + if (typeof(result) === 'string') { + return new(tree.Quoted)('"' + result + '"', result, this.escaped, this.index); + } else if 
(Array.isArray(result)) { + return new(tree.Anonymous)(result.join(', ')); + } else { + return new(tree.Anonymous)(result); + } + } +}; + +})(require('less/tree')); + diff --git a/src/dashboard/src/media/vendor/less.js/lib/less/tree/keyword.js b/src/dashboard/src/media/vendor/less.js/lib/less/tree/keyword.js new file mode 100644 index 0000000000..a4431ba3a4 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/lib/less/tree/keyword.js @@ -0,0 +1,9 @@ +(function (tree) { + +tree.Keyword = function (value) { this.value = value }; +tree.Keyword.prototype = { + eval: function () { return this }, + toCSS: function () { return this.value } +}; + +})(require('less/tree')); diff --git a/src/dashboard/src/media/vendor/less.js/lib/less/tree/mixin.js b/src/dashboard/src/media/vendor/less.js/lib/less/tree/mixin.js new file mode 100644 index 0000000000..24cb8e4c0e --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/lib/less/tree/mixin.js @@ -0,0 +1,106 @@ +(function (tree) { + +tree.mixin = {}; +tree.mixin.Call = function (elements, args, index) { + this.selector = new(tree.Selector)(elements); + this.arguments = args; + this.index = index; +}; +tree.mixin.Call.prototype = { + eval: function (env) { + var mixins, args, rules = [], match = false; + + for (var i = 0; i < env.frames.length; i++) { + if ((mixins = env.frames[i].find(this.selector)).length > 0) { + args = this.arguments && this.arguments.map(function (a) { return a.eval(env) }); + for (var m = 0; m < mixins.length; m++) { + if (mixins[m].match(args, env)) { + try { + Array.prototype.push.apply( + rules, mixins[m].eval(env, this.arguments).rules); + match = true; + } catch (e) { + throw { message: e.message, index: e.index, stack: e.stack, call: this.index }; + } + } + } + if (match) { + return rules; + } else { + throw { message: 'No matching definition was found for `' + + this.selector.toCSS().trim() + '(' + + this.arguments.map(function (a) { + return a.toCSS(); + }).join(', ') + ")`", + index: 
this.index }; + } + } + } + throw { message: this.selector.toCSS().trim() + " is undefined", + index: this.index }; + } +}; + +tree.mixin.Definition = function (name, params, rules) { + this.name = name; + this.selectors = [new(tree.Selector)([new(tree.Element)(null, name)])]; + this.params = params; + this.arity = params.length; + this.rules = rules; + this._lookups = {}; + this.required = params.reduce(function (count, p) { + if (!p.name || (p.name && !p.value)) { return count + 1 } + else { return count } + }, 0); + this.parent = tree.Ruleset.prototype; + this.frames = []; +}; +tree.mixin.Definition.prototype = { + toCSS: function () { return "" }, + variable: function (name) { return this.parent.variable.call(this, name) }, + variables: function () { return this.parent.variables.call(this) }, + find: function () { return this.parent.find.apply(this, arguments) }, + rulesets: function () { return this.parent.rulesets.apply(this) }, + + eval: function (env, args) { + var frame = new(tree.Ruleset)(null, []), context, _arguments = []; + + for (var i = 0, val; i < this.params.length; i++) { + if (this.params[i].name) { + if (val = (args && args[i]) || this.params[i].value) { + frame.rules.unshift(new(tree.Rule)(this.params[i].name, val.eval(env))); + } else { + throw { message: "wrong number of arguments for " + this.name + + ' (' + args.length + ' for ' + this.arity + ')' }; + } + } + } + for (var i = 0; i < Math.max(this.params.length, args && args.length); i++) { + _arguments.push(args[i] || this.params[i].value); + } + frame.rules.unshift(new(tree.Rule)('@arguments', new(tree.Expression)(_arguments).eval(env))); + + return new(tree.Ruleset)(null, this.rules.slice(0)).eval({ + frames: [this, frame].concat(this.frames, env.frames) + }); + }, + match: function (args, env) { + var argsLength = (args && args.length) || 0, len; + + if (argsLength < this.required) { return false } + if ((this.required > 0) && (argsLength > this.params.length)) { return false } + + len 
= Math.min(argsLength, this.arity); + + for (var i = 0; i < len; i++) { + if (!this.params[i].name) { + if (args[i].eval(env).toCSS() != this.params[i].value.eval(env).toCSS()) { + return false; + } + } + } + return true; + } +}; + +})(require('less/tree')); diff --git a/src/dashboard/src/media/vendor/less.js/lib/less/tree/operation.js b/src/dashboard/src/media/vendor/less.js/lib/less/tree/operation.js new file mode 100644 index 0000000000..d2e4d5780a --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/lib/less/tree/operation.js @@ -0,0 +1,32 @@ +(function (tree) { + +tree.Operation = function (op, operands) { + this.op = op.trim(); + this.operands = operands; +}; +tree.Operation.prototype.eval = function (env) { + var a = this.operands[0].eval(env), + b = this.operands[1].eval(env), + temp; + + if (a instanceof tree.Dimension && b instanceof tree.Color) { + if (this.op === '*' || this.op === '+') { + temp = b, b = a, a = temp; + } else { + throw { name: "OperationError", + message: "Can't substract or divide a color from a number" }; + } + } + return a.operate(this.op, b); +}; + +tree.operate = function (op, a, b) { + switch (op) { + case '+': return a + b; + case '-': return a - b; + case '*': return a * b; + case '/': return a / b; + } +}; + +})(require('less/tree')); diff --git a/src/dashboard/src/media/vendor/less.js/lib/less/tree/quoted.js b/src/dashboard/src/media/vendor/less.js/lib/less/tree/quoted.js new file mode 100644 index 0000000000..6ddfa40f51 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/lib/less/tree/quoted.js @@ -0,0 +1,29 @@ +(function (tree) { + +tree.Quoted = function (str, content, escaped, i) { + this.escaped = escaped; + this.value = content || ''; + this.quote = str.charAt(0); + this.index = i; +}; +tree.Quoted.prototype = { + toCSS: function () { + if (this.escaped) { + return this.value; + } else { + return this.quote + this.value + this.quote; + } + }, + eval: function (env) { + var that = this; + var value = 
this.value.replace(/`([^`]+)`/g, function (_, exp) { + return new(tree.JavaScript)(exp, that.index, true).eval(env).value; + }).replace(/@\{([\w-]+)\}/g, function (_, name) { + var v = new(tree.Variable)('@' + name, that.index).eval(env); + return v.value || v.toCSS(); + }); + return new(tree.Quoted)(this.quote + value + this.quote, value, this.escaped, this.index); + } +}; + +})(require('less/tree')); diff --git a/src/dashboard/src/media/vendor/less.js/lib/less/tree/rule.js b/src/dashboard/src/media/vendor/less.js/lib/less/tree/rule.js new file mode 100644 index 0000000000..18cc49bfa6 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/lib/less/tree/rule.js @@ -0,0 +1,38 @@ +(function (tree) { + +tree.Rule = function (name, value, important, index) { + this.name = name; + this.value = (value instanceof tree.Value) ? value : new(tree.Value)([value]); + this.important = important ? ' ' + important.trim() : ''; + this.index = index; + + if (name.charAt(0) === '@') { + this.variable = true; + } else { this.variable = false } +}; +tree.Rule.prototype.toCSS = function (env) { + if (this.variable) { return "" } + else { + return this.name + (env.compress ? 
':' : ': ') + + this.value.toCSS(env) + + this.important + ";"; + } +}; + +tree.Rule.prototype.eval = function (context) { + return new(tree.Rule)(this.name, this.value.eval(context), this.important, this.index); +}; + +tree.Shorthand = function (a, b) { + this.a = a; + this.b = b; +}; + +tree.Shorthand.prototype = { + toCSS: function (env) { + return this.a.toCSS(env) + "/" + this.b.toCSS(env); + }, + eval: function () { return this } +}; + +})(require('less/tree')); diff --git a/src/dashboard/src/media/vendor/less.js/lib/less/tree/ruleset.js b/src/dashboard/src/media/vendor/less.js/lib/less/tree/ruleset.js new file mode 100644 index 0000000000..cc9a60aecd --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/lib/less/tree/ruleset.js @@ -0,0 +1,212 @@ +(function (tree) { + +tree.Ruleset = function (selectors, rules) { + this.selectors = selectors; + this.rules = rules; + this._lookups = {}; +}; +tree.Ruleset.prototype = { + eval: function (env) { + var ruleset = new(tree.Ruleset)(this.selectors, this.rules.slice(0)); + + ruleset.root = this.root; + + // push the current ruleset to the frames stack + env.frames.unshift(ruleset); + + // Evaluate imports + if (ruleset.root) { + for (var i = 0; i < ruleset.rules.length; i++) { + if (ruleset.rules[i] instanceof tree.Import) { + Array.prototype.splice + .apply(ruleset.rules, [i, 1].concat(ruleset.rules[i].eval(env))); + } + } + } + + // Store the frames around mixin definitions, + // so they can be evaluated like closures when the time comes. + for (var i = 0; i < ruleset.rules.length; i++) { + if (ruleset.rules[i] instanceof tree.mixin.Definition) { + ruleset.rules[i].frames = env.frames.slice(0); + } + } + + // Evaluate mixin calls. 
+ for (var i = 0; i < ruleset.rules.length; i++) { + if (ruleset.rules[i] instanceof tree.mixin.Call) { + Array.prototype.splice + .apply(ruleset.rules, [i, 1].concat(ruleset.rules[i].eval(env))); + } + } + + // Evaluate everything else + for (var i = 0, rule; i < ruleset.rules.length; i++) { + rule = ruleset.rules[i]; + + if (! (rule instanceof tree.mixin.Definition)) { + ruleset.rules[i] = rule.eval ? rule.eval(env) : rule; + } + } + + // Pop the stack + env.frames.shift(); + + return ruleset; + }, + match: function (args) { + return !args || args.length === 0; + }, + variables: function () { + if (this._variables) { return this._variables } + else { + return this._variables = this.rules.reduce(function (hash, r) { + if (r instanceof tree.Rule && r.variable === true) { + hash[r.name] = r; + } + return hash; + }, {}); + } + }, + variable: function (name) { + return this.variables()[name]; + }, + rulesets: function () { + if (this._rulesets) { return this._rulesets } + else { + return this._rulesets = this.rules.filter(function (r) { + return (r instanceof tree.Ruleset) || (r instanceof tree.mixin.Definition); + }); + } + }, + find: function (selector, self) { + self = self || this; + var rules = [], rule, match, + key = selector.toCSS(); + + if (key in this._lookups) { return this._lookups[key] } + + this.rulesets().forEach(function (rule) { + if (rule !== self) { + for (var j = 0; j < rule.selectors.length; j++) { + if (match = selector.match(rule.selectors[j])) { + if (selector.elements.length > rule.selectors[j].elements.length) { + Array.prototype.push.apply(rules, rule.find( + new(tree.Selector)(selector.elements.slice(1)), self)); + } else { + rules.push(rule); + } + break; + } + } + } + }); + return this._lookups[key] = rules; + }, + // + // Entry point for code generation + // + // `context` holds an array of arrays. 
+ // + toCSS: function (context, env) { + var css = [], // The CSS output + rules = [], // node.Rule instances + rulesets = [], // node.Ruleset instances + paths = [], // Current selectors + selector, // The fully rendered selector + rule; + + if (! this.root) { + if (context.length === 0) { + paths = this.selectors.map(function (s) { return [s] }); + } else { + this.joinSelectors( paths, context, this.selectors ); + } + } + + // Compile rules and rulesets + for (var i = 0; i < this.rules.length; i++) { + rule = this.rules[i]; + + if (rule.rules || (rule instanceof tree.Directive)) { + rulesets.push(rule.toCSS(paths, env)); + } else if (rule instanceof tree.Comment) { + if (!rule.silent) { + if (this.root) { + rulesets.push(rule.toCSS(env)); + } else { + rules.push(rule.toCSS(env)); + } + } + } else { + if (rule.toCSS && !rule.variable) { + rules.push(rule.toCSS(env)); + } else if (rule.value && !rule.variable) { + rules.push(rule.value.toString()); + } + } + } + + rulesets = rulesets.join(''); + + // If this is the root node, we don't render + // a selector, or {}. + // Otherwise, only output if this ruleset has rules. + if (this.root) { + css.push(rules.join(env.compress ? '' : '\n')); + } else { + if (rules.length > 0) { + selector = paths.map(function (p) { + return p.map(function (s) { + return s.toCSS(env); + }).join('').trim(); + }).join(env.compress ? ',' : (paths.length > 3 ? ',\n' : ', ')); + css.push(selector, + (env.compress ? '{' : ' {\n ') + + rules.join(env.compress ? '' : '\n ') + + (env.compress ? '}' : '\n}\n')); + } + } + css.push(rulesets); + + return css.join('') + (env.compress ? 
'\n' : ''); + }, + + joinSelectors: function (paths, context, selectors) { + for (var s = 0; s < selectors.length; s++) { + this.joinSelector(paths, context, selectors[s]); + } + }, + + joinSelector: function (paths, context, selector) { + var before = [], after = [], beforeElements = [], + afterElements = [], hasParentSelector = false, el; + + for (var i = 0; i < selector.elements.length; i++) { + el = selector.elements[i]; + if (el.combinator.value[0] === '&') { + hasParentSelector = true; + } + if (hasParentSelector) afterElements.push(el); + else beforeElements.push(el); + } + + if (! hasParentSelector) { + afterElements = beforeElements; + beforeElements = []; + } + + if (beforeElements.length > 0) { + before.push(new(tree.Selector)(beforeElements)); + } + + if (afterElements.length > 0) { + after.push(new(tree.Selector)(afterElements)); + } + + for (var c = 0; c < context.length; c++) { + paths.push(before.concat(context[c]).concat(after)); + } + } +}; +})(require('less/tree')); diff --git a/src/dashboard/src/media/vendor/less.js/lib/less/tree/selector.js b/src/dashboard/src/media/vendor/less.js/lib/less/tree/selector.js new file mode 100644 index 0000000000..1cf41967b1 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/lib/less/tree/selector.js @@ -0,0 +1,37 @@ +(function (tree) { + +tree.Selector = function (elements) { + this.elements = elements; + if (this.elements[0].combinator.value === "") { + this.elements[0].combinator.value = ' '; + } +}; +tree.Selector.prototype.match = function (other) { + var len = this.elements.length, + olen = other.elements.length, + max = Math.min(len, olen); + + if (len < olen) { + return false; + } else { + for (var i = 0; i < max; i++) { + if (this.elements[i].value !== other.elements[i].value) { + return false; + } + } + } + return true; +}; +tree.Selector.prototype.toCSS = function (env) { + if (this._css) { return this._css } + + return this._css = this.elements.map(function (e) { + if (typeof(e) === 'string') { 
+ return ' ' + e.trim(); + } else { + return e.toCSS(env); + } + }).join(''); +}; + +})(require('less/tree')); diff --git a/src/dashboard/src/media/vendor/less.js/lib/less/tree/url.js b/src/dashboard/src/media/vendor/less.js/lib/less/tree/url.js new file mode 100644 index 0000000000..a45645881d --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/lib/less/tree/url.js @@ -0,0 +1,25 @@ +(function (tree) { + +tree.URL = function (val, paths) { + if (val.data) { + this.attrs = val; + } else { + // Add the base path if the URL is relative and we are in the browser + if (!/^(?:https?:\/\/|file:\/\/|data:)?/.test(val.value) && paths.length > 0 && typeof(window) !== 'undefined') { + val.value = paths[0] + (val.value.charAt(0) === '/' ? val.value.slice(1) : val.value); + } + this.value = val; + this.paths = paths; + } +}; +tree.URL.prototype = { + toCSS: function () { + return "url(" + (this.attrs ? 'data:' + this.attrs.mime + this.attrs.charset + this.attrs.base64 + this.attrs.data + : this.value.toCSS()) + ")"; + }, + eval: function (ctx) { + return this.attrs ? this : new(tree.URL)(this.value.eval(ctx), this.paths); + } +}; + +})(require('less/tree')); diff --git a/src/dashboard/src/media/vendor/less.js/lib/less/tree/value.js b/src/dashboard/src/media/vendor/less.js/lib/less/tree/value.js new file mode 100644 index 0000000000..922096cdc6 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/lib/less/tree/value.js @@ -0,0 +1,24 @@ +(function (tree) { + +tree.Value = function (value) { + this.value = value; + this.is = 'value'; +}; +tree.Value.prototype = { + eval: function (env) { + if (this.value.length === 1) { + return this.value[0].eval(env); + } else { + return new(tree.Value)(this.value.map(function (v) { + return v.eval(env); + })); + } + }, + toCSS: function (env) { + return this.value.map(function (e) { + return e.toCSS(env); + }).join(env.compress ? 
',' : ', '); + } +}; + +})(require('less/tree')); diff --git a/src/dashboard/src/media/vendor/less.js/lib/less/tree/variable.js b/src/dashboard/src/media/vendor/less.js/lib/less/tree/variable.js new file mode 100644 index 0000000000..10f7c08476 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/lib/less/tree/variable.js @@ -0,0 +1,24 @@ +(function (tree) { + +tree.Variable = function (name, index) { this.name = name, this.index = index }; +tree.Variable.prototype = { + eval: function (env) { + var variable, v, name = this.name; + + if (name.indexOf('@@') == 0) { + name = '@' + new(tree.Variable)(name.slice(1)).eval(env).value; + } + + if (variable = tree.find(env.frames, function (frame) { + if (v = frame.variable(name)) { + return v.value.eval(env); + } + })) { return variable } + else { + throw { message: "variable " + name + " is undefined", + index: this.index }; + } + } +}; + +})(require('less/tree')); diff --git a/src/dashboard/src/media/vendor/less.js/package.json b/src/dashboard/src/media/vendor/less.js/package.json new file mode 100644 index 0000000000..123f2d9a54 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/package.json @@ -0,0 +1,13 @@ +{ + "name" : "less", + "description" : "Leaner CSS", + "url" : "http://lesscss.org", + "keywords" : ["css", "parser", "lesscss", "browser"], + "author" : "Alexis Sellier ", + "contributors" : [], + "version" : "1.1.4", + "bin" : { "lessc": "./bin/lessc" }, + "main" : "./lib/less/index", + "directories" : { "test": "./test" }, + "engines" : { "node": ">=0.4.0" } +} diff --git a/src/dashboard/src/media/vendor/less.js/test/css/colors.css b/src/dashboard/src/media/vendor/less.js/test/css/colors.css new file mode 100644 index 0000000000..540f9b065e --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/test/css/colors.css @@ -0,0 +1,48 @@ +#yelow #short { + color: #fea; +} +#yelow #long { + color: #ffeeaa; +} +#yelow #rgba { + color: rgba(255, 238, 170, 0.1); +} +#yelow #argb { + color: #1affeeaa; +} 
+#blue #short { + color: #00f; +} +#blue #long { + color: #0000ff; +} +#blue #rgba { + color: rgba(0, 0, 255, 0.1); +} +#blue #argb { + color: #1a0000ff; +} +#alpha #hsla { + color: rgba(61, 45, 41, 0.6); +} +#overflow .a { + color: #000000; +} +#overflow .b { + color: #ffffff; +} +#overflow .c { + color: #ffffff; +} +#overflow .d { + color: #00ff00; +} +#grey { + color: #c8c8c8; +} +#808080 { + color: #808080; +} +#00ff00 { + color: #00ff00; +} diff --git a/src/dashboard/src/media/vendor/less.js/test/css/comments.css b/src/dashboard/src/media/vendor/less.js/test/css/comments.css new file mode 100644 index 0000000000..c71896880e --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/test/css/comments.css @@ -0,0 +1,52 @@ +/******************\ +* * +* Comment Header * +* * +\******************/ +/* + + Comment + +*/ +/* + * Comment Test + * + * - cloudhead (http://cloudhead.net) + * + */ +/* Colors + * ------ + * #EDF8FC (background blue) + * #166C89 (darkest blue) + * + * Text: + * #333 (standard text) // A comment within a comment! 
+ * #1F9EC9 (standard link) + * + */ +/* @group Variables +------------------- */ +#comments { + /**/ + color: red; + /* A C-style comment */ + + background-color: orange; + font-size: 12px; + /* lost comment */ + content: "content"; + border: 1px solid black; + padding: 0; + margin: 2em; +} +/* commented out + #more-comments { + color: grey; + } +*/ +.selector, .lots, .comments { + color: grey, /* blue */ orange; +} +#last { + color: blue; +} diff --git a/src/dashboard/src/media/vendor/less.js/test/css/css-3.css b/src/dashboard/src/media/vendor/less.js/test/css/css-3.css new file mode 100644 index 0000000000..a8eea6b055 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/test/css/css-3.css @@ -0,0 +1,45 @@ +.comma-delimited { + background: url(bg.jpg) no-repeat, url(bg.png) repeat-x top left, url(bg); + text-shadow: -1px -1px 1px red, 6px 5px 5px yellow; + -moz-box-shadow: 0pt 0pt 2px rgba(255, 255, 255, 0.4) inset, 0pt 4px 6px rgba(255, 255, 255, 0.4) inset; +} +@font-face { + font-family: Headline; + src: local(Futura-Medium), url(fonts.svg#MyGeometricModern) format("svg"); +} +.other { + -moz-transform: translate(0, 11em) rotate(-90deg); +} +p:not([class*="lead"]) { + color: black; +} +input[type="text"].class#id[attr=32]:not(1) { + color: white; +} +div#id.class[a=1][b=2].class:not(1) { + color: white; +} +ul.comma > li:not(:only-child)::after { + color: white; +} +ol.comma > li:nth-last-child(2)::after { + color: white; +} +li:nth-child(4n+1), li:nth-child(-5n), li:nth-child(-n+2) { + color: white; +} +a[href^="http://"] { + color: black; +} +a[href$="http://"] { + color: black; +} +form[data-disabled] { + color: black; +} +p::before { + color: black; +} +#issue322 { + -webkit-animation: anim2 7s infinite ease-in-out; +} diff --git a/src/dashboard/src/media/vendor/less.js/test/css/css-escapes.css b/src/dashboard/src/media/vendor/less.js/test/css/css-escapes.css new file mode 100644 index 0000000000..194c59faf1 --- /dev/null +++ 
b/src/dashboard/src/media/vendor/less.js/test/css/css-escapes.css @@ -0,0 +1,20 @@ +.escape\|random\|char { + color: red; +} +.mixin\!tUp { + font-weight: bold; +} +.\34 04 { + background: red; +} +.\34 04 strong { + color: fuchsia; + font-weight: bold; +} +.trailingTest\+ { + color: red; +} +/* This hideous test of hideousness checks for the selector "blockquote" with various permutations of hex escapes */ +\62\6c\6f \63 \6B \0071 \000075o\74 e { + color: silver; +} diff --git a/src/dashboard/src/media/vendor/less.js/test/css/css.css b/src/dashboard/src/media/vendor/less.js/test/css/css.css new file mode 100644 index 0000000000..b0496cc3c8 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/test/css/css.css @@ -0,0 +1,85 @@ +@charset "utf-8"; +div { + color: black; +} +div { + width: 99%; +} +* { + min-width: 45em; +} +h1, h2 > a > p, h3 { + color: none; +} +div.class { + color: blue; +} +div#id { + color: green; +} +.class#id { + color: purple; +} +.one.two.three { + color: grey; +} +@media print { + font-size: 3em; +} +@media screen { + font-size: 10px; +} +@font-face { + font-family: 'Garamond Pro'; + src: url("/fonts/garamond-pro.ttf"); +} +a:hover, a:link { + color: #999; +} +p, p:first-child { + text-transform: none; +} +q:lang(no) { + quotes: none; +} +p + h1 { + font-size: 2.2em; +} +#shorthands { + border: 1px solid #000; + font: 12px/16px Arial; + font: 100%/16px Arial; + margin: 1px 0; + padding: 0 auto; + background: url("http://www.lesscss.org/spec.html") no-repeat 0 4px; +} +#more-shorthands { + margin: 0; + padding: 1px 0 2px 0; + font: normal small/20px 'Trebuchet MS', Verdana, sans-serif; +} +.misc { + -moz-border-radius: 2px; + display: -moz-inline-stack; + width: .1em; + background-color: #009998; + background-image: url(images/image.jpg); + background: -webkit-gradient(linear, left top, left bottom, from(red), to(blue)); + margin: ; + filter: alpha(opacity=100); +} +#important { + color: red !important; + width: 100%!important; + height: 
20px ! important; +} +#data-uri { + background: url(data:image/png;charset=utf-8;base64, + kiVBORw0KGgoAAAANSUhEUgAAABAAAAAQAQMAAAAlPW0iAAAABlBMVEUAAAD/ + k//+l2Z/dAAAAM0lEQVR4nGP4/5/h/1+G/58ZDrAz3D/McH8yw83NDDeNGe4U + kg9C9zwz3gVLMDA/A6P9/AFGGFyjOXZtQAAAAAElFTkSuQmCC); + background-image: url(data:image/x-png,f9difSSFIIGFIFJD1f982FSDKAA9==); +} +#svg-data-uri { + background: transparent url('data:image/svg+xml, '); +} diff --git a/src/dashboard/src/media/vendor/less.js/test/css/functions.css b/src/dashboard/src/media/vendor/less.js/test/css/functions.css new file mode 100644 index 0000000000..f33b9869b6 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/test/css/functions.css @@ -0,0 +1,30 @@ +#functions { + color: #660000; + width: 16; + height: undefined("self"); + border-width: 5; + variable: 11; +} +#built-in { + escaped: -Some::weird(#thing, y); + lighten: #ffcccc; + darken: #330000; + saturate: #203c31; + desaturate: #29332f; + greyscale: #2e2e2e; + spin-p: #bf6a40; + spin-n: #bf4055; + format: "rgb(32, 128, 64)"; + format-string: "hello world"; + format-multiple: "hello earth 2"; + format-url-encode: "red is %23ff0000"; + eformat: rgb(32, 128, 64); + hue: 98; + saturation: 12%; + lightness: 95%; + rounded: 11; + roundedpx: 3px; +} +#alpha { + alpha: rgba(153, 94, 51, 0.6); +} diff --git a/src/dashboard/src/media/vendor/less.js/test/css/import.css b/src/dashboard/src/media/vendor/less.js/test/css/import.css new file mode 100644 index 0000000000..320ffeb202 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/test/css/import.css @@ -0,0 +1,16 @@ +@import "import-test-d.css"; + +@import url(http://fonts.googleapis.com/css?family=Open+Sans); +#import { + color: red; +} +.mixin { + height: 10px; + color: red; +} +#import-test { + height: 10px; + color: red; + width: 10px; + height: 30%; +} diff --git a/src/dashboard/src/media/vendor/less.js/test/css/javascript.css b/src/dashboard/src/media/vendor/less.js/test/css/javascript.css new file mode 100644 
index 0000000000..5a3f82236d --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/test/css/javascript.css @@ -0,0 +1,22 @@ +.eval { + js: 42; + js: 2; + js: "hello world"; + js: 1, 2, 3; + title: "node"; + ternary: true; +} +.scope { + var: 42; + escaped: 7px; +} +.vars { + width: 8; +} +.escape-interpol { + width: hello world; +} +.arrays { + ary: "1, 2, 3"; + ary: "1, 2, 3"; +} diff --git a/src/dashboard/src/media/vendor/less.js/test/css/lazy-eval.css b/src/dashboard/src/media/vendor/less.js/test/css/lazy-eval.css new file mode 100644 index 0000000000..1adfb8f384 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/test/css/lazy-eval.css @@ -0,0 +1,3 @@ +.lazy-eval { + width: 100%; +} diff --git a/src/dashboard/src/media/vendor/less.js/test/css/media.css b/src/dashboard/src/media/vendor/less.js/test/css/media.css new file mode 100644 index 0000000000..13b6be0af6 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/test/css/media.css @@ -0,0 +1,21 @@ +@media print { + .class { + color: blue; + } + .class .sub { + width: 42; + } + .top, header > h1 { + color: #444444; + } +} +@media screen { + body { + max-width: 480; + } +} +@media all and (orientation:portrait) { + aside { + float: none; + } +} diff --git a/src/dashboard/src/media/vendor/less.js/test/css/mixins-args.css b/src/dashboard/src/media/vendor/less.js/test/css/mixins-args.css new file mode 100644 index 0000000000..41d606b085 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/test/css/mixins-args.css @@ -0,0 +1,61 @@ +#hidden { + color: transparent; + color: transparent; +} +.two-args { + color: blue; + width: 10px; + height: 99%; + border: 2px dotted black; +} +.one-arg { + width: 15px; + height: 49%; +} +.no-parens { + width: 5px; + height: 49%; +} +.no-args { + width: 5px; + height: 49%; +} +.var-args { + width: 45; + height: 17%; +} +.multi-mix { + width: 10px; + height: 29%; + margin: 4; + padding: 5; +} +body { + padding: 30px; + color: #ff0000; +} +.scope-mix { + width: 8; 
+} +.content { + width: 600px; +} +.content .column { + margin: 600px; +} +#same-var-name { + radius: 5px; +} +#var-inside { + width: 10px; +} +.id-class { + color: red; + color: red; +} +.arguments { + border: 1px solid black; +} +.arguments2 { + border: 0px; +} diff --git a/src/dashboard/src/media/vendor/less.js/test/css/mixins-closure.css b/src/dashboard/src/media/vendor/less.js/test/css/mixins-closure.css new file mode 100644 index 0000000000..b1021b6fb6 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/test/css/mixins-closure.css @@ -0,0 +1,9 @@ +.class { + width: 99px; +} +.overwrite { + width: 99px; +} +.nested .class { + width: 5px; +} diff --git a/src/dashboard/src/media/vendor/less.js/test/css/mixins-nested.css b/src/dashboard/src/media/vendor/less.js/test/css/mixins-nested.css new file mode 100644 index 0000000000..6378c47561 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/test/css/mixins-nested.css @@ -0,0 +1,14 @@ +.class .inner { + height: 300; +} +.class .inner .innest { + width: 30; + border-width: 60; +} +.class2 .inner { + height: 600; +} +.class2 .inner .innest { + width: 60; + border-width: 120; +} diff --git a/src/dashboard/src/media/vendor/less.js/test/css/mixins-pattern.css b/src/dashboard/src/media/vendor/less.js/test/css/mixins-pattern.css new file mode 100644 index 0000000000..eeaadecfc5 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/test/css/mixins-pattern.css @@ -0,0 +1,49 @@ +.zero { + zero: 0; + one: 1; + two: 2; + three: 3; +} +.one { + zero: 0; + one: 1; + one-req: 1; + two: 2; + three: 3; +} +.two { + zero: 0; + one: 1; + two: 2; + three: 3; +} +.three { + zero: 0; + one: 1; + two: 2; + three-req: 3; + three: 3; +} +.left { + left: 1; +} +.right { + right: 1; +} +.border-right { + color: black; + border-right: 4px; +} +.border-left { + color: black; + border-left: 4px; +} +.only-right { + right: 33; +} +.only-left { + left: 33; +} +.left-right { + both: 330; +} diff --git 
a/src/dashboard/src/media/vendor/less.js/test/css/mixins.css b/src/dashboard/src/media/vendor/less.js/test/css/mixins.css new file mode 100644 index 0000000000..be9967f0a7 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/test/css/mixins.css @@ -0,0 +1,70 @@ +.mixin { + border: 1px solid black; +} +.mixout { + border-color: orange; +} +.borders { + border-style: dashed; +} +#namespace .borders { + border-style: dotted; +} +#namespace .biohazard { + content: "death"; +} +#namespace .biohazard .man { + color: transparent; +} +#theme > .mixin { + background-color: grey; +} +#container { + color: black; + border: 1px solid black; + border-color: orange; + background-color: grey; +} +#header .milk { + color: white; + border: 1px solid black; + background-color: grey; +} +#header #cookie { + border-style: dashed; +} +#header #cookie .chips { + border-style: dotted; +} +#header #cookie .chips .calories { + color: black; + border: 1px solid black; + border-color: orange; + background-color: grey; +} +.secure-zone { + color: transparent; +} +.direct { + border-style: dotted; +} +.bo, .bar { + width: 100%; +} +.bo { + border: 1px; +} +.ar.bo.ca { + color: black; +} +.jo.ki { + background: none; +} +.extended { + width: 100%; + border: 1px; + background: none; +} +.foo .bar { + width: 100%; +} diff --git a/src/dashboard/src/media/vendor/less.js/test/css/operations.css b/src/dashboard/src/media/vendor/less.js/test/css/operations.css new file mode 100644 index 0000000000..30a941b4f6 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/test/css/operations.css @@ -0,0 +1,46 @@ +#operations { + color: #111111; + height: 9px; + width: 3em; + substraction: 0; + division: 1; +} +#operations .spacing { + height: 9px; + width: 3em; +} +.with-variables { + height: 16em; + width: 24em; + size: 1cm; +} +.with-functions { + color: #646464; + color: #ff8080; + color: #c94a4a; +} +.negative { + height: 0px; + width: 4px; +} +.shorthands { + padding: -1px 2px 0 -4px; +} 
+.colors { + color: #123; + border-color: #334455; + background-color: #000000; +} +.colors .other { + color: #222222; + border-color: #222222; +} +.negations { + variable: -4px; + variable1: 0px; + variable2: 0px; + variable3: 8px; + variable4: 0px; + paren: -4px; + paren2: 16px; +} diff --git a/src/dashboard/src/media/vendor/less.js/test/css/parens.css b/src/dashboard/src/media/vendor/less.js/test/css/parens.css new file mode 100644 index 0000000000..209255f729 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/test/css/parens.css @@ -0,0 +1,20 @@ +.parens { + border: 2px solid black; + margin: 1px 3px 16 3; + width: 36; + padding: 2px 36px; +} +.more-parens { + padding: 8 4 4 4px; + width: 96; + height: 113; + margin: 12; +} +.nested-parens { + width: 71; + height: 6; +} +.mixed-units { + margin: 2px 4em 1 5pc; + padding: 6px 1em 2px 2; +} diff --git a/src/dashboard/src/media/vendor/less.js/test/css/rulesets.css b/src/dashboard/src/media/vendor/less.js/test/css/rulesets.css new file mode 100644 index 0000000000..3e73bc0c52 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/test/css/rulesets.css @@ -0,0 +1,29 @@ +#first > .one { + font-size: 2em; +} +#first > .one > #second .two > #deux { + width: 50%; +} +#first > .one > #second .two > #deux #third { + height: 100%; +} +#first > .one > #second .two > #deux #third:focus { + color: black; +} +#first > .one > #second .two > #deux #third:focus #fifth > #sixth .seventh #eighth + #ninth { + color: purple; +} +#first > .one > #second .two > #deux #fourth, #first > .one > #second .two > #deux #five, #first > .one > #second .two > #deux #six { + color: #110000; +} +#first > .one > #second .two > #deux #fourth .seven, +#first > .one > #second .two > #deux #five .seven, +#first > .one > #second .two > #deux #six .seven, +#first > .one > #second .two > #deux #fourth .eight > #nine, +#first > .one > #second .two > #deux #five .eight > #nine, +#first > .one > #second .two > #deux #six .eight > #nine { + 
border: 1px solid black; +} +#first > .one > #second .two > #deux #fourth #ten, #first > .one > #second .two > #deux #five #ten, #first > .one > #second .two > #deux #six #ten { + color: red; +} diff --git a/src/dashboard/src/media/vendor/less.js/test/css/scope.css b/src/dashboard/src/media/vendor/less.js/test/css/scope.css new file mode 100644 index 0000000000..926bfc587d --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/test/css/scope.css @@ -0,0 +1,15 @@ +.tiny-scope { + color: #998899; +} +.scope1 { + color: blue; + border-color: black; +} +.scope1 .scope2 { + color: blue; +} +.scope1 .scope2 .scope3 { + color: red; + border-color: black; + background-color: white; +} diff --git a/src/dashboard/src/media/vendor/less.js/test/css/selectors.css b/src/dashboard/src/media/vendor/less.js/test/css/selectors.css new file mode 100644 index 0000000000..a384f9386e --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/test/css/selectors.css @@ -0,0 +1,53 @@ +h1 a:hover, +h2 a:hover, +h3 a:hover, +h1 p:hover, +h2 p:hover, +h3 p:hover { + color: red; +} +#all { + color: blue; +} +#the { + color: blue; +} +#same { + color: blue; +} +ul, +li, +div, +q, +blockquote, +textarea { + margin: 0; +} +td { + margin: 0; + padding: 0; +} +td, input { + line-height: 1em; +} +a { + color: red; +} +a:hover { + color: blue; +} +div a { + color: green; +} +p a span { + color: yellow; +} +.foo .bar .qux, .foo .baz .qux { + display: block; +} +.qux .foo .bar, .qux .foo .baz { + display: inline; +} +.qux .foo .bar .biz, .qux .foo .baz .biz { + display: none; +} diff --git a/src/dashboard/src/media/vendor/less.js/test/css/strings.css b/src/dashboard/src/media/vendor/less.js/test/css/strings.css new file mode 100644 index 0000000000..13f9329204 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/test/css/strings.css @@ -0,0 +1,38 @@ +#strings { + background-image: url("http://son-of-a-banana.com"); + quotes: "~" "~"; + content: "#*%:&^,)!.(~*})"; + empty: ""; + brackets: "{" 
"}"; +} +#comments { + content: "/* hello */ // not-so-secret"; +} +#single-quote { + quotes: "'" "'"; + content: '""#!&""'; + empty: ''; + semi-colon: ';'; +} +#escaped { + filter: DX.Transform.MS.BS.filter(opacity=50); +} +#one-line { + image: url(http://tooks.com); +} +#crazy { + image: url(http://), "}", url("http://}"); +} +#interpolation { + url: "http://lesscss.org/dev/image.jpg"; + url2: "http://lesscss.org/image-256.jpg"; + url3: "http://lesscss.org#445566"; + url4: "http://lesscss.org/hello"; + url5: "http://lesscss.org/54.4"; +} +.mix-mul-class { + color: blue; + color: red; + color: blue; + color: orange; +} diff --git a/src/dashboard/src/media/vendor/less.js/test/css/variables.css b/src/dashboard/src/media/vendor/less.js/test/css/variables.css new file mode 100644 index 0000000000..143124d450 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/test/css/variables.css @@ -0,0 +1,24 @@ +.variables { + width: 14cm; +} +.variables { + height: 24px; + color: #888888; + font-family: "Trebuchet MS", Verdana, sans-serif; + quotes: "~" "~"; +} +.redefinition { + three: 3; +} +.values { + font-family: 'Trebuchet', 'Trebuchet', 'Trebuchet'; + color: #888888 !important; + url: url('Trebuchet'); + multi: something 'A', B, C, 'Trebuchet'; +} +.variable-names { + name: 'hello'; +} +.alpha { + filter: alpha(opacity=42); +} diff --git a/src/dashboard/src/media/vendor/less.js/test/css/whitespace.css b/src/dashboard/src/media/vendor/less.js/test/css/whitespace.css new file mode 100644 index 0000000000..d40d2fe00d --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/test/css/whitespace.css @@ -0,0 +1,36 @@ +.whitespace { + color: white; +} +.whitespace { + color: white; +} +.whitespace { + color: white; +} +.whitespace { + color: white; +} +.whitespace { + color: white ; +} +.white, .space, .mania { + color: white; +} +.no-semi-column { + color: white; +} +.no-semi-column { + color: white; + white-space: pre; +} +.no-semi-column { + border: 2px solid white; +} 
+.newlines { + background: the, + great, + wall; + border: 2px + solid + black; +} diff --git a/src/dashboard/src/media/vendor/less.js/test/less-test.js b/src/dashboard/src/media/vendor/less.js/test/less-test.js new file mode 100644 index 0000000000..8dc162e756 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/test/less-test.js @@ -0,0 +1,75 @@ +var path = require('path'), + fs = require('fs'), + sys = require('sys'); + +require.paths.unshift(__dirname, path.join(__dirname, '..')); + +var less = require('lib/less'); + +less.tree.functions.add = function (a, b) { + return new(less.tree.Dimension)(a.value + b.value); +} +less.tree.functions.increment = function (a) { + return new(less.tree.Dimension)(a.value + 1); +} +less.tree.functions.color = function (str) { + if (str.value === "evil red") { return new(less.tree.Color)("600") } +} + +sys.puts("\n" + stylize("LESS", 'underline') + "\n"); + +fs.readdirSync('test/less').forEach(function (file) { + if (! /\.less/.test(file)) { return } + + toCSS('test/less/' + file, function (err, less) { + var name = path.basename(file, '.less'); + + fs.readFile(path.join('test/css', name) + '.css', 'utf-8', function (e, css) { + sys.print("- " + name + ": ") + if (less === css) { sys.print(stylize('OK', 'green')) } + else if (err) { + sys.print(stylize("ERROR: " + (err && err.message), 'red')); + } else { + sys.print(stylize("FAIL", 'yellow')); + } + sys.puts(""); + }); + }); +}); + +function toCSS(path, callback) { + var tree, css; + fs.readFile(path, 'utf-8', function (e, str) { + if (e) { return callback(e) } + + new(less.Parser)({ + paths: [require('path').dirname(path)], + optimization: 0 + }).parse(str, function (err, tree) { + if (err) { + callback(err); + } else { + try { + css = tree.toCSS(); + callback(null, css); + } catch (e) { + callback(e); + } + } + }); + }); +} + +// Stylize a string +function stylize(str, style) { + var styles = { + 'bold' : [1, 22], + 'inverse' : [7, 27], + 'underline' : [4, 24], + 
'yellow' : [33, 39], + 'green' : [32, 39], + 'red' : [31, 39] + }; + return '\033[' + styles[style][0] + 'm' + str + + '\033[' + styles[style][1] + 'm'; +} diff --git a/src/dashboard/src/media/vendor/less.js/test/less/colors.less b/src/dashboard/src/media/vendor/less.js/test/less/colors.less new file mode 100644 index 0000000000..5744e1677f --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/test/less/colors.less @@ -0,0 +1,52 @@ +#yelow { + #short { + color: #fea; + } + #long { + color: #ffeeaa; + } + #rgba { + color: rgba(255, 238, 170, 0.1); + } + #argb { + color: argb(rgba(255, 238, 170, 0.1)); + } +} + +#blue { + #short { + color: #00f; + } + #long { + color: #0000ff; + } + #rgba { + color: rgba(0, 0, 255, 0.1); + } + #argb { + color: argb(rgba(0, 0, 255, 0.1)); + } +} + +#alpha #hsla { + color: hsla(11, 20%, 20%, 0.6); +} + +#overflow { + .a { color: #111111 - #444444; } // #000000 + .b { color: #eee + #fff; } // #ffffff + .c { color: #aaa * 3; } // #ffffff + .d { color: #00ee00 + #009900; } // #00ff00 +} + +#grey { + color: rgb(200, 200, 200); +} + +#808080 { + color: hsl(50, 0%, 50%); +} + +#00ff00 { + color: hsl(120, 100%, 50%); +} diff --git a/src/dashboard/src/media/vendor/less.js/test/less/comments.less b/src/dashboard/src/media/vendor/less.js/test/less/comments.less new file mode 100644 index 0000000000..102dd68411 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/test/less/comments.less @@ -0,0 +1,63 @@ +/******************\ +* * +* Comment Header * +* * +\******************/ + +/* + + Comment + +*/ + +/* + * Comment Test + * + * - cloudhead (http://cloudhead.net) + * + */ + +//////////////// +@var: "content"; +//////////////// + +/* Colors + * ------ + * #EDF8FC (background blue) + * #166C89 (darkest blue) + * + * Text: + * #333 (standard text) // A comment within a comment! 
+ * #1F9EC9 (standard link) + * + */ + +/* @group Variables +------------------- */ +#comments /* boo */ { + /**/ // An empty comment + color: red; /* A C-style comment */ + background-color: orange; // A little comment + font-size: 12px; + + /* lost comment */ content: @var; + + border: 1px solid black; + + // padding & margin // + padding: 0; // }{ '" + margin: 2em; +} // + +/* commented out + #more-comments { + color: grey; + } +*/ + +.selector /* .with */, .lots, /* of */ .comments { + color: grey, /* blue */ orange; +} + +#last { color: blue } +// diff --git a/src/dashboard/src/media/vendor/less.js/test/less/css-3.less b/src/dashboard/src/media/vendor/less.js/test/less/css-3.less new file mode 100644 index 0000000000..daaf051e05 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/test/less/css-3.less @@ -0,0 +1,59 @@ +.comma-delimited { + background: url(bg.jpg) no-repeat, url(bg.png) repeat-x top left, url(bg); + text-shadow: -1px -1px 1px red, 6px 5px 5px yellow; + -moz-box-shadow: 0pt 0pt 2px rgba(255, 255, 255, 0.4) inset, + 0pt 4px 6px rgba(255, 255, 255, 0.4) inset; +} +@font-face { + font-family: Headline; + src: local(Futura-Medium), + url(fonts.svg#MyGeometricModern) format("svg"); +} +.other { + -moz-transform: translate(0, 11em) rotate(-90deg); +} +p:not([class*="lead"]) { + color: black; +} + +input[type="text"].class#id[attr=32]:not(1) { + color: white; +} + +div#id.class[a=1][b=2].class:not(1) { + color: white; +} + +ul.comma > li:not(:only-child)::after { + color: white; +} + +ol.comma > li:nth-last-child(2)::after { + color: white; +} + +li:nth-child(4n+1), +li:nth-child(-5n), +li:nth-child(-n+2) { + color: white; +} + +a[href^="http://"] { + color: black; +} + +a[href$="http://"] { + color: black; +} + +form[data-disabled] { + color: black; +} + +p::before { + color: black; +} + +#issue322 { + -webkit-animation: anim2 7s infinite ease-in-out; +} diff --git a/src/dashboard/src/media/vendor/less.js/test/less/css-escapes.less 
b/src/dashboard/src/media/vendor/less.js/test/less/css-escapes.less new file mode 100644 index 0000000000..a2893e5819 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/test/less/css-escapes.less @@ -0,0 +1,28 @@ +@ugly: fuchsia; + +.escape\|random\|char { + color: red; +} + +.mixin\!tUp { + font-weight: bold; +} + +// class="404" +.\34 04 { + background: red; + + strong { + color: @ugly; + .mixin\!tUp; + } +} + +.trailingTest\+ { + color: red; +} + +/* This hideous test of hideousness checks for the selector "blockquote" with various permutations of hex escapes */ +\62\6c\6f \63 \6B \0071 \000075o\74 e { + color: silver; +} \ No newline at end of file diff --git a/src/dashboard/src/media/vendor/less.js/test/less/css.less b/src/dashboard/src/media/vendor/less.js/test/less/css.less new file mode 100644 index 0000000000..3654cf8270 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/test/less/css.less @@ -0,0 +1,100 @@ +@charset "utf-8"; +div { color: black; } +div { width: 99%; } + +* { + min-width: 45em; +} + +h1, h2 > a > p, h3 { + color: none; +} + +div.class { + color: blue; +} + +div#id { + color: green; +} + +.class#id { + color: purple; +} + +.one.two.three { + color: grey; +} + +@media print { + font-size: 3em; +} + +@media screen { + font-size: 10px; +} + +@font-face { + font-family: 'Garamond Pro'; + src: url("/fonts/garamond-pro.ttf"); +} + +a:hover, a:link { + color: #999; +} + +p, p:first-child { + text-transform: none; +} + +q:lang(no) { + quotes: none; +} + +p + h1 { + font-size: 2.2em; +} + +#shorthands { + border: 1px solid #000; + font: 12px/16px Arial; + font: 100%/16px Arial; + margin: 1px 0; + padding: 0 auto; + background: url("http://www.lesscss.org/spec.html") no-repeat 0 4px; +} + +#more-shorthands { + margin: 0; + padding: 1px 0 2px 0; + font: normal small/20px 'Trebuchet MS', Verdana, sans-serif; +} + +.misc { + -moz-border-radius: 2px; + display: -moz-inline-stack; + width: .1em; + background-color: #009998; + 
background-image: url(images/image.jpg); + background: -webkit-gradient(linear, left top, left bottom, from(red), to(blue)); + margin: ; + filter: alpha(opacity=100); +} + +#important { + color: red !important; + width: 100%!important; + height: 20px ! important; +} + +#data-uri { + background: url(data:image/png;charset=utf-8;base64, + kiVBORw0KGgoAAAANSUhEUgAAABAAAAAQAQMAAAAlPW0iAAAABlBMVEUAAAD/ + k//+l2Z/dAAAAM0lEQVR4nGP4/5/h/1+G/58ZDrAz3D/McH8yw83NDDeNGe4U + kg9C9zwz3gVLMDA/A6P9/AFGGFyjOXZtQAAAAAElFTkSuQmCC); + background-image: url(data:image/x-png,f9difSSFIIGFIFJD1f982FSDKAA9==); +} + +#svg-data-uri { + background: transparent url('data:image/svg+xml, '); +} diff --git a/src/dashboard/src/media/vendor/less.js/test/less/functions.less b/src/dashboard/src/media/vendor/less.js/test/less/functions.less new file mode 100644 index 0000000000..1af78bc809 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/test/less/functions.less @@ -0,0 +1,35 @@ +#functions { + @var: 10; + color: color("evil red"); // #660000 + width: increment(15); + height: undefined("self"); + border-width: add(2, 3); + variable: increment(@var); +} + +#built-in { + @r: 32; + escaped: e("-Some::weird(#thing, y)"); + lighten: lighten(#ff0000, 40%); + darken: darken(#ff0000, 40%); + saturate: saturate(#29332f, 20%); + desaturate: desaturate(#203c31, 20%); + greyscale: greyscale(#203c31); + spin-p: spin(hsl(340, 50%, 50%), 40); + spin-n: spin(hsl(30, 50%, 50%), -40); + format: %("rgb(%d, %d, %d)", @r, 128, 64); + format-string: %("hello %s", "world"); + format-multiple: %("hello %s %d", "earth", 2); + format-url-encode: %('red is %A', #ff0000); + eformat: e(%("rgb(%d, %d, %d)", @r, 128, 64)); + + hue: hue(hsl(98, 12%, 95%)); + saturation: saturation(hsl(98, 12%, 95%)); + lightness: lightness(hsl(98, 12%, 95%)); + rounded: round(@r/3); + roundedpx: round(10px / 3); +} + +#alpha { + alpha: darken(hsla(25, 50%, 50%, 0.6), 10%); +} diff --git 
a/src/dashboard/src/media/vendor/less.js/test/less/import.less b/src/dashboard/src/media/vendor/less.js/test/less/import.less new file mode 100644 index 0000000000..42be3c1e54 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/test/less/import.less @@ -0,0 +1,9 @@ +@import url("import/import-test-a.less"); +//@import url("import/import-test-a.less"); +@import url(http://fonts.googleapis.com/css?family=Open+Sans); + +#import-test { + .mixin; + width: 10px; + height: @a + 10%; +} diff --git a/src/dashboard/src/media/vendor/less.js/test/less/import/import-test-a.less b/src/dashboard/src/media/vendor/less.js/test/less/import/import-test-a.less new file mode 100644 index 0000000000..7409d07148 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/test/less/import/import-test-a.less @@ -0,0 +1,2 @@ +@import "import-test-b.less"; +@a: 20%; diff --git a/src/dashboard/src/media/vendor/less.js/test/less/import/import-test-b.less b/src/dashboard/src/media/vendor/less.js/test/less/import/import-test-b.less new file mode 100644 index 0000000000..ce2d35a83d --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/test/less/import/import-test-b.less @@ -0,0 +1,8 @@ +@import "import-test-c"; + +@b: 100%; + +.mixin { + height: 10px; + color: @c; +} diff --git a/src/dashboard/src/media/vendor/less.js/test/less/import/import-test-c.less b/src/dashboard/src/media/vendor/less.js/test/less/import/import-test-c.less new file mode 100644 index 0000000000..111266ba84 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/test/less/import/import-test-c.less @@ -0,0 +1,7 @@ + +@import "import-test-d.css"; +@c: red; + +#import { + color: @c; +} diff --git a/src/dashboard/src/media/vendor/less.js/test/less/import/import-test-d.css b/src/dashboard/src/media/vendor/less.js/test/less/import/import-test-d.css new file mode 100644 index 0000000000..30575f0186 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/test/less/import/import-test-d.css @@ -0,0 +1 @@ +#css { color: 
yellow; } diff --git a/src/dashboard/src/media/vendor/less.js/test/less/javascript.less b/src/dashboard/src/media/vendor/less.js/test/less/javascript.less new file mode 100644 index 0000000000..bfaf897657 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/test/less/javascript.less @@ -0,0 +1,27 @@ +.eval { + js: `42`; + js: `1 + 1`; + js: `"hello world"`; + js: `[1, 2, 3]`; + title: `process.title`; + ternary: `(1 + 1 == 2 ? true : false)`; +} +.scope { + @foo: 42; + var: `this.foo.toJS()`; + escaped: ~`2 + 5 + 'px'`; +} +.vars { + @var: `4 + 4`; + width: @var; +} +.escape-interpol { + @world: "world"; + width: ~`"hello" + " " + @{world}`; +} +.arrays { + @ary: 1, 2, 3; + @ary2: 1 2 3; + ary: `@{ary}.join(', ')`; + ary: `@{ary2}.join(', ')`; +} diff --git a/src/dashboard/src/media/vendor/less.js/test/less/lazy-eval.less b/src/dashboard/src/media/vendor/less.js/test/less/lazy-eval.less new file mode 100644 index 0000000000..72b3fd46ef --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/test/less/lazy-eval.less @@ -0,0 +1,6 @@ +@var: @a; +@a: 100%; + +.lazy-eval { + width: @var; +} diff --git a/src/dashboard/src/media/vendor/less.js/test/less/media.less b/src/dashboard/src/media/vendor/less.js/test/less/media.less new file mode 100644 index 0000000000..0b08a59184 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/test/less/media.less @@ -0,0 +1,25 @@ + +// For now, variables can't be declared inside @media blocks. 
+ +@var: 42; + +@media print { + .class { + color: blue; + .sub { + width: @var; + } + } + .top, header > h1 { + color: #222 * 2; + } +} + +@media screen { + @base: 8; + body { max-width: @base * 60; } +} + +@media all and (orientation:portrait) { + aside { float: none; } +} diff --git a/src/dashboard/src/media/vendor/less.js/test/less/mixins-args.less b/src/dashboard/src/media/vendor/less.js/test/less/mixins-args.less new file mode 100644 index 0000000000..0b202ddabe --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/test/less/mixins-args.less @@ -0,0 +1,118 @@ +.mixin (@a: 1px, @b: 50%) { + width: @a * 5; + height: @b - 1%; +} + +.mixina (@style, @width, @color: black) { + border: @width @style @color; +} + +.mixiny +(@a: 0, @b: 0) { + margin: @a; + padding: @b; +} + +.hidden() { + color: transparent; // asd +} + +#hidden { + .hidden; + .hidden(); +} + +.two-args { + color: blue; + .mixin(2px, 100%); + .mixina(dotted, 2px); +} + +.one-arg { + .mixin(3px); +} + +.no-parens { + .mixin; +} + +.no-args { + .mixin(); +} + +.var-args { + @var: 9; + .mixin(@var, @var * 2); +} + +.multi-mix { + .mixin(2px, 30%); + .mixiny(4, 5); +} + +.maxa(@arg1: 10, @arg2: #f00) { + padding: @arg1 * 2px; + color: @arg2; +} + +body { + .maxa(15); +} + +@glob: 5; +.global-mixin(@a:2) { + width: @glob + @a; +} + +.scope-mix { + .global-mixin(3); +} + +.nested-ruleset (@width: 200px) { + width: @width; + .column { margin: @width; } +} +.content { + .nested-ruleset(600px); +} + +// + +.same-var-name2(@radius) { + radius: @radius; +} +.same-var-name(@radius) { + .same-var-name2(@radius); +} +#same-var-name { + .same-var-name(5px); +} + +// + +.var-inside () { + @var: 10px; + width: @var; +} +#var-inside { .var-inside; } + +// # mixins + +#id-mixin () { + color: red; +} +.id-class { + #id-mixin(); + #id-mixin; +} + +.mixin-arguments (@width: 0px) { + border: @arguments; +} + +.arguments { + .mixin-arguments(1px, solid, black); +} +.arguments2 { + .mixin-arguments(); +} diff --git 
a/src/dashboard/src/media/vendor/less.js/test/less/mixins-closure.less b/src/dashboard/src/media/vendor/less.js/test/less/mixins-closure.less new file mode 100644 index 0000000000..01251d2ad5 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/test/less/mixins-closure.less @@ -0,0 +1,26 @@ +.scope { + @var: 99px; + .mixin () { + width: @var; + } +} + +.class { + .scope > .mixin; +} + +.overwrite { + @var: 0px; + .scope > .mixin; +} + +.nested { + @var: 5px; + .mixin () { + width: @var; + } + .class { + @var: 10px; + .mixin; + } +} diff --git a/src/dashboard/src/media/vendor/less.js/test/less/mixins-nested.less b/src/dashboard/src/media/vendor/less.js/test/less/mixins-nested.less new file mode 100644 index 0000000000..d086279c8f --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/test/less/mixins-nested.less @@ -0,0 +1,22 @@ +.mix-inner (@var) { + border-width: @var; +} + +.mix (@a: 10) { + .inner { + height: @a * 10; + + .innest { + width: @a; + .mix-inner(@a * 2); + } + } +} + +.class { + .mix(30); +} + +.class2 { + .mix(60); +} diff --git a/src/dashboard/src/media/vendor/less.js/test/less/mixins-pattern.less b/src/dashboard/src/media/vendor/less.js/test/less/mixins-pattern.less new file mode 100644 index 0000000000..6392df02d0 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/test/less/mixins-pattern.less @@ -0,0 +1,96 @@ +.mixin () { + zero: 0; +} +.mixin (@a: 1px) { + one: 1; +} +.mixin (@a) { + one-req: 1; +} +.mixin (@a: 1px, @b: 2px) { + two: 2; +} + +.mixin (@a, @b, @c) { + three-req: 3; +} + +.mixin (@a: 1px, @b: 2px, @c: 3px) { + three: 3; +} + +.zero { + .mixin(); +} + +.one { + .mixin(1); +} + +.two { + .mixin(1, 2); +} + +.three { + .mixin(1, 2, 3); +} + +// + +.mixout ('left') { + left: 1; +} + +.mixout ('right') { + right: 1; +} + +.left { + .mixout('left'); +} +.right { + .mixout('right'); +} + +// + +.border (@side, @width) { + color: black; + .border-side(@side, @width); +} +.border-side (left, @w) { + border-left: @w; +} 
+.border-side (right, @w) { + border-right: @w; +} + +.border-right { + .border(right, 4px); +} +.border-left { + .border(left, 4px); +} + +// + + +.border-radius (@r) { + both: @r * 10; +} +.border-radius (@r, left) { + left: @r; +} +.border-radius (@r, right) { + right: @r; +} + +.only-right { + .border-radius(33, right); +} +.only-left { + .border-radius(33, left); +} +.left-right { + .border-radius(33); +} diff --git a/src/dashboard/src/media/vendor/less.js/test/less/mixins.less b/src/dashboard/src/media/vendor/less.js/test/less/mixins.less new file mode 100644 index 0000000000..2fd761174a --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/test/less/mixins.less @@ -0,0 +1,67 @@ +.mixin { border: 1px solid black; } +.mixout { border-color: orange; } +.borders { border-style: dashed; } + +#namespace { + .borders { + border-style: dotted; + } + .biohazard { + content: "death"; + .man { + color: transparent; + } + } +} +#theme { + > .mixin { + background-color: grey; + } +} +#container { + color: black; + .mixin; + .mixout; + #theme > .mixin; +} + +#header { + .milk { + color: white; + .mixin; + #theme > .mixin; + } + #cookie { + .chips { + #namespace .borders; + .calories { + #container; + } + } + .borders; + } +} +.secure-zone { #namespace .biohazard .man; } +.direct { + #namespace > .borders; +} + +.bo, .bar { + width: 100%; +} +.bo { + border: 1px; +} +.ar.bo.ca { + color: black; +} +.jo.ki { + background: none; +} +.extended { + .bo; + .jo.ki; +} +.foo .bar { + .bar; +} diff --git a/src/dashboard/src/media/vendor/less.js/test/less/operations.less b/src/dashboard/src/media/vendor/less.js/test/less/operations.less new file mode 100644 index 0000000000..e7c974b346 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/test/less/operations.less @@ -0,0 +1,58 @@ +#operations { + color: #110000 + #000011 + #001100; // #111111 + height: 10px / 2px + 6px - 1px * 2; // 9px + width: 2 * 4 - 5em; // 3em + .spacing { + height: 10px / 2px+6px-1px*2; + width: 2 
* 4-5em; + } + substraction: 20 - 10 - 5 - 5; // 0 + division: 20 / 5 / 4; // 1 +} + +@x: 4; +@y: 12em; + +.with-variables { + height: @x + @y; // 16em + width: 12 + @y; // 24em + size: 5cm - @x; // 1cm +} + +.with-functions { + color: rgb(200, 200, 200) / 2; + color: 2 * hsl(0, 50%, 50%); + color: rgb(10, 10, 10) + hsl(0, 50%, 50%); +} + +@z: -2; + +.negative { + height: 2px + @z; // 0px + width: 2px - @z; // 4px +} + +.shorthands { + padding: -1px 2px 0 -4px; // +} + +.colors { + color: #123; // #112233 + border-color: #234 + #111111; // #334455 + background-color: #222222 - #fff; // #000000 + .other { + color: 2 * #111; // #222222 + border-color: #333333 / 3 + #111; // #222222 + } +} + +.negations { + @var: 4px; + variable: -@var; // 4 + variable1: -@var + @var; // 0 + variable2: @var + -@var; // 0 + variable3: @var - -@var; // 8 + variable4: -@var - -@var; // 0 + paren: -(@var); // -4px + paren2: -(2 + 2) * -@var; // 16 +} diff --git a/src/dashboard/src/media/vendor/less.js/test/less/parens.less b/src/dashboard/src/media/vendor/less.js/test/less/parens.less new file mode 100644 index 0000000000..e020c7eb3c --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/test/less/parens.less @@ -0,0 +1,26 @@ +.parens { + @var: 1px; + border: (@var * 2) solid black; + margin: (@var * 1) (@var + 2) (4 * 4) 3; + width: (6 * 6); + padding: 2px (6px * 6px); +} + +.more-parens { + @var: (2 * 2); + padding: (2 * @var) 4 4 (@var * 1px); + width: (@var * @var) * 6; + height: (7 * 7) + (8 * 8); + margin: 4 * (5 + 5) / 2 - (@var * 2); + //margin: (6 * 6)px; +} + +.nested-parens { + width: 2 * (4 * (2 + (1 + 6))) - 1; + height: ((2+3)*(2+3) / (9-4)) + 1; +} + +.mixed-units { + margin: 2px 4em 1 5pc; + padding: (2px + 4px) 1em 2px 2; +} diff --git a/src/dashboard/src/media/vendor/less.js/test/less/rulesets.less b/src/dashboard/src/media/vendor/less.js/test/less/rulesets.less new file mode 100644 index 0000000000..e81192dbc7 --- /dev/null +++ 
b/src/dashboard/src/media/vendor/less.js/test/less/rulesets.less @@ -0,0 +1,30 @@ +#first > .one { + > #second .two > #deux { + width: 50%; + #third { + &:focus { + color: black; + #fifth { + > #sixth { + .seventh #eighth { + + #ninth { + color: purple; + } + } + } + } + } + height: 100%; + } + #fourth, #five, #six { + color: #110000; + .seven, .eight > #nine { + border: 1px solid black; + } + #ten { + color: red; + } + } + } + font-size: 2em; +} diff --git a/src/dashboard/src/media/vendor/less.js/test/less/scope.less b/src/dashboard/src/media/vendor/less.js/test/less/scope.less new file mode 100644 index 0000000000..da664626d4 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/test/less/scope.less @@ -0,0 +1,32 @@ +@x: blue; +@z: transparent; +@mix: none; + +.mixin { + @mix: #989; +} + +.tiny-scope { + color: @mix; // #989 + .mixin; +} + +.scope1 { + @y: orange; + @z: black; + color: @x; // blue + border-color: @z; // black + .hidden { + @x: #131313; + } + .scope2 { + @y: red; + color: @x; // blue + .scope3 { + @local: white; + color: @y; // red + border-color: @z; // black + background-color: @local; // white + } + } +} diff --git a/src/dashboard/src/media/vendor/less.js/test/less/selectors.less b/src/dashboard/src/media/vendor/less.js/test/less/selectors.less new file mode 100644 index 0000000000..5bc2bb1f8b --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/test/less/selectors.less @@ -0,0 +1,48 @@ +h1, h2, h3 { + a, p { + &:hover { + color: red; + } + } +} + +#all { color: blue; } +#the { color: blue; } +#same { color: blue; } + +ul, li, div, q, blockquote, textarea { + margin: 0; +} + +td { + margin: 0; + padding: 0; +} + +td, input { + line-height: 1em; +} + +a { + color: red; + + &:hover { color: blue; } + + div & { color: green; } + + p & span { color: yellow; } +} + +.foo { + .bar, .baz { + & .qux { + display: block; + } + .qux & { + display: inline; + } + .qux & .biz { + display: none; + } + } +} \ No newline at end of file diff --git 
a/src/dashboard/src/media/vendor/less.js/test/less/strings.less b/src/dashboard/src/media/vendor/less.js/test/less/strings.less new file mode 100644 index 0000000000..026d2202b8 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/test/less/strings.less @@ -0,0 +1,49 @@ +#strings { + background-image: url("http://son-of-a-banana.com"); + quotes: "~" "~"; + content: "#*%:&^,)!.(~*})"; + empty: ""; + brackets: "{" "}"; +} +#comments { + content: "/* hello */ // not-so-secret"; +} +#single-quote { + quotes: "'" "'"; + content: '""#!&""'; + empty: ''; + semi-colon: ';'; +} +#escaped { + filter: ~"DX.Transform.MS.BS.filter(opacity=50)"; +} +#one-line { image: url(http://tooks.com) } +#crazy { image: url(http://), "}", url("http://}") } +#interpolation { + @var: '/dev'; + url: "http://lesscss.org@{var}/image.jpg"; + + @var2: 256; + url2: "http://lesscss.org/image-@{var2}.jpg"; + + @var3: #456; + url3: "http://lesscss.org@{var3}"; + + @var4: hello; + url4: "http://lesscss.org/@{var4}"; + + @var5: 54.4px; + url5: "http://lesscss.org/@{var5}"; +} + +// multiple calls with string interpolation + +.mix-mul (@a: green) { + color: ~"@{a}"; +} +.mix-mul-class { + .mix-mul(blue); + .mix-mul(red); + .mix-mul(blue); + .mix-mul(orange); +} diff --git a/src/dashboard/src/media/vendor/less.js/test/less/variables.less b/src/dashboard/src/media/vendor/less.js/test/less/variables.less new file mode 100644 index 0000000000..87c44aeb2a --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/test/less/variables.less @@ -0,0 +1,50 @@ +@a: 2; +@x: @a * @a; +@y: @x + 1; +@z: @x * 2 + @y; + +.variables { + width: @z + 1cm; // 14cm +} + +@b: @a * 10; +@c: #888; + +@fonts: "Trebuchet MS", Verdana, sans-serif; +@f: @fonts; + +@quotes: "~" "~"; +@q: @quotes; + +.variables { + height: @b + @x + 0px; // 24px + color: @c; + font-family: @f; + quotes: @q; +} + +.redefinition { + @var: 4; + @var: 2; + @var: 3; + three: @var; +} + +.values { + @a: 'Trebuchet'; + @multi: 'A', B, C; + font-family: 
@a, @a, @a; + color: @c !important; + url: url(@a); + multi: something @multi, @a; +} + +.variable-names { + @var: 'hello'; + @name: 'var'; + name: @@name; +} +.alpha { + @var: 42; + filter: alpha(opacity=@var); +} diff --git a/src/dashboard/src/media/vendor/less.js/test/less/whitespace.less b/src/dashboard/src/media/vendor/less.js/test/less/whitespace.less new file mode 100644 index 0000000000..cc0a8a3243 --- /dev/null +++ b/src/dashboard/src/media/vendor/less.js/test/less/whitespace.less @@ -0,0 +1,37 @@ + + +.whitespace + { color: white; } + +.whitespace +{ + color: white; +} + .whitespace +{ color: white; } + +.whitespace{color:white;} +.whitespace { color : white ; } + +.white, +.space, +.mania +{ color: white; } + +.no-semi-column { color: white } +.no-semi-column { + color: white; + white-space: pre +} +.no-semi-column {border: 2px solid white} +.newlines { + background: the, + great, + wall; + border: 2px + solid + black; +} +.empty { + +} diff --git a/src/dashboard/src/media/vendor/select2/README.md b/src/dashboard/src/media/vendor/select2/README.md new file mode 100644 index 0000000000..b12438067e --- /dev/null +++ b/src/dashboard/src/media/vendor/select2/README.md @@ -0,0 +1,38 @@ +Select2 +================= + +Select2 is a jQuery based replacement for select boxes. It supports searching, remote data sets, and infinite scrolling of results. Look and feel of Select2 is based on the excellent [Chosen](http://harvesthq.github.com/chosen/) library. + +To get started -- checkout http://ivaynberg.github.com/select2! + +Bug tracker +----------- + +Have a bug? Please create an issue here on GitHub! + +https://github.com/ivaynberg/select2/issues + + +Mailing list +------------ + +Have a question? Ask on our mailing list! 
+ +select2@googlegroups.com + +https://groups.google.com/d/forum/select2 + + +Copyright and License +--------------------- + +Copyright 2012 Igor Vaynberg + +Licensed under the Apache License, Version 2.0 (the "License"); you may not use this work except in +compliance with the License. You may obtain a copy of the License in the LICENSE file, or at: + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software distributed under the License is +distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and limitations under the License. \ No newline at end of file diff --git a/src/dashboard/src/media/vendor/select2/select2.css b/src/dashboard/src/media/vendor/select2/select2.css new file mode 100644 index 0000000000..20c03d9bdd --- /dev/null +++ b/src/dashboard/src/media/vendor/select2/select2.css @@ -0,0 +1,411 @@ +/* +Version: 2.0 Timestamp: Wed, May 16, 2012 10:38:37 AM +*/ +.select2-container { + position: relative; + display: inline-block; + /* inline-block for ie7 */ + zoom: 1; + *display: inline; + +} + +.select2-container, +.select2-drop, +.select2-search, +.select2-container .select2-search input{ + /* + Force border-box so that % widths fit the parent + container without overlap because of margin/padding. 
+ + More Info : http://www.quirksmode.org/css/box.html + */ + -moz-box-sizing: border-box; /* firefox */ + -ms-box-sizing: border-box; /* ie */ + -webkit-box-sizing: border-box; /* webkit */ + -khtml-box-sizing: border-box; /* konqueror */ + box-sizing: border-box; /* css3 */ +} + +.select2-container .select2-choice { + background-color: #fff; + background-image: -webkit-gradient(linear, left bottom, left top, color-stop(0, #eeeeee), color-stop(0.5, white)); + background-image: -webkit-linear-gradient(center bottom, #eeeeee 0%, white 50%); + background-image: -moz-linear-gradient(center bottom, #eeeeee 0%, white 50%); + background-image: -o-linear-gradient(bottom, #eeeeee 0%, #ffffff 50%); + background-image: -ms-linear-gradient(top, #eeeeee 0%, #ffffff 50%); + filter: progid:DXImageTransform.Microsoft.gradient(startColorstr = '#eeeeee', endColorstr = '#ffffff', GradientType = 0); + background-image: linear-gradient(top, #eeeeee 0%, #ffffff 50%); + -webkit-border-radius: 4px; + -moz-border-radius: 4px; + border-radius: 4px; + -moz-background-clip: padding; + -webkit-background-clip: padding-box; + background-clip: padding-box; + border: 1px solid #aaa; + display: block; + overflow: hidden; + white-space: nowrap; + position: relative; + height: 26px; + line-height: 26px; + padding: 0 0 0 8px; + color: #444; + text-decoration: none; +} + +.select2-container .select2-choice span { + margin-right: 26px; + display: block; + overflow: hidden; + white-space: nowrap; + -o-text-overflow: ellipsis; + -ms-text-overflow: ellipsis; + text-overflow: ellipsis; +} + +.select2-container .select2-choice abbr { + display: block; + position: absolute; + right: 26px; + top: 8px; + width: 12px; + height: 12px; + font-size: 1px; + background: url(select2.png) right top no-repeat; + cursor: pointer; + text-decoration: none; + border:0; + outline: 0; +} +.select2-container .select2-choice abbr:hover { + background-position: right -11px; + cursor: pointer; +} + +.select2-container 
.select2-drop { + background: #fff; + border: 1px solid #aaa; + border-top: 0; + position: absolute; + top: 100%; + -webkit-box-shadow: 0 4px 5px rgba(0, 0, 0, .15); + -moz-box-shadow: 0 4px 5px rgba(0, 0, 0, .15); + -o-box-shadow: 0 4px 5px rgba(0, 0, 0, .15); + box-shadow: 0 4px 5px rgba(0, 0, 0, .15); + z-index: 999; + width:100%; + margin-top:-1px; + + -webkit-border-radius: 0 0 4px 4px; + -moz-border-radius: 0 0 4px 4px; + border-radius: 0 0 4px 4px; +} + +.select2-container .select2-choice div { + -webkit-border-radius: 0 4px 4px 0; + -moz-border-radius: 0 4px 4px 0; + border-radius: 0 4px 4px 0; + -moz-background-clip: padding; + -webkit-background-clip: padding-box; + background-clip: padding-box; + background: #ccc; + background-image: -webkit-gradient(linear, left bottom, left top, color-stop(0, #ccc), color-stop(0.6, #eee)); + background-image: -webkit-linear-gradient(center bottom, #ccc 0%, #eee 60%); + background-image: -moz-linear-gradient(center bottom, #ccc 0%, #eee 60%); + background-image: -o-linear-gradient(bottom, #ccc 0%, #eee 60%); + background-image: -ms-linear-gradient(top, #cccccc 0%, #eeeeee 60%); + filter: progid:DXImageTransform.Microsoft.gradient(startColorstr = '#cccccc', endColorstr = '#eeeeee', GradientType = 0); + background-image: linear-gradient(top, #cccccc 0%, #eeeeee 60%); + border-left: 1px solid #aaa; + position: absolute; + right: 0; + top: 0; + display: block; + height: 100%; + width: 18px; +} + +.select2-container .select2-choice div b { + background: url('select2.png') no-repeat 0 1px; + display: block; + width: 100%; + height: 100%; +} + +.select2-container .select2-search { + display: inline-block; + white-space: nowrap; + z-index: 1010; + min-height: 26px; + width: 100%; + margin: 0; + padding-left: 4px; + padding-right: 4px; +} + +.select2-container .select2-search input { + background: #fff url('select2.png') no-repeat 100% -22px; + background: url('select2.png') no-repeat 100% -22px, -webkit-gradient(linear, left 
bottom, left top, color-stop(0.85, white), color-stop(0.99, #eeeeee)); + background: url('select2.png') no-repeat 100% -22px, -webkit-linear-gradient(center bottom, white 85%, #eeeeee 99%); + background: url('select2.png') no-repeat 100% -22px, -moz-linear-gradient(center bottom, white 85%, #eeeeee 99%); + background: url('select2.png') no-repeat 100% -22px, -o-linear-gradient(bottom, white 85%, #eeeeee 99%); + background: url('select2.png') no-repeat 100% -22px, -ms-linear-gradient(top, #ffffff 85%, #eeeeee 99%); + background: url('select2.png') no-repeat 100% -22px, linear-gradient(top, #ffffff 85%, #eeeeee 99%); + padding: 4px 20px 4px 5px; + outline: 0; + border: 1px solid #aaa; + font-family: sans-serif; + font-size: 1em; + width:100%; + margin:0; + height:auto !important; + min-height: 26px; + -webkit-box-shadow: none; + -moz-box-shadow: none; + box-shadow: none; + border-radius: 0; + -moz-border-radius: 0; + -webkit-border-radius: 0; +} + +.select2-container .select2-search input.select2-active { + background: #fff url('spinner.gif') no-repeat 100%; + background: url('spinner.gif') no-repeat 100%, -webkit-gradient(linear, left bottom, left top, color-stop(0.85, white), color-stop(0.99, #eeeeee)); + background: url('spinner.gif') no-repeat 100%, -webkit-linear-gradient(center bottom, white 85%, #eeeeee 99%); + background: url('spinner.gif') no-repeat 100%, -moz-linear-gradient(center bottom, white 85%, #eeeeee 99%); + background: url('spinner.gif') no-repeat 100%, -o-linear-gradient(bottom, white 85%, #eeeeee 99%); + background: url('spinner.gif') no-repeat 100%, -ms-linear-gradient(top, #ffffff 85%, #eeeeee 99%); + background: url('spinner.gif') no-repeat 100%, linear-gradient(top, #ffffff 85%, #eeeeee 99%); +} + + +.select2-container-active .select2-choice, +.select2-container-active .select2-choices { + -webkit-box-shadow: 0 0 5px rgba(0,0,0,.3); + -moz-box-shadow : 0 0 5px rgba(0,0,0,.3); + -o-box-shadow : 0 0 5px rgba(0,0,0,.3); + box-shadow : 0 0 5px 
rgba(0,0,0,.3); + border: 1px solid #5897fb; + outline: none; +} + +.select2-dropdown-open .select2-choice { + border: 1px solid #aaa; + border-bottom-color: transparent; + -webkit-box-shadow: 0 1px 0 #fff inset; + -moz-box-shadow : 0 1px 0 #fff inset; + -o-box-shadow : 0 1px 0 #fff inset; + box-shadow : 0 1px 0 #fff inset; + background-color: #eee; + background-image: -webkit-gradient(linear, left bottom, left top, color-stop(0, white), color-stop(0.5, #eeeeee)); + background-image: -webkit-linear-gradient(center bottom, white 0%, #eeeeee 50%); + background-image: -moz-linear-gradient(center bottom, white 0%, #eeeeee 50%); + background-image: -o-linear-gradient(bottom, white 0%, #eeeeee 50%); + background-image: -ms-linear-gradient(top, #ffffff 0%,#eeeeee 50%); + filter: progid:DXImageTransform.Microsoft.gradient( startColorstr='#ffffff', endColorstr='#eeeeee',GradientType=0 ); + background-image: linear-gradient(top, #ffffff 0%,#eeeeee 50%); + -webkit-border-bottom-left-radius : 0; + -webkit-border-bottom-right-radius: 0; + -moz-border-radius-bottomleft : 0; + -moz-border-radius-bottomright: 0; + border-bottom-left-radius : 0; + border-bottom-right-radius: 0; +} + +.select2-dropdown-open .select2-choice div { + background: transparent; + border-left: none; +} +.select2-dropdown-open .select2-choice div b { + background-position: -18px 1px; +} + +/* results */ +.select2-container .select2-results { + margin: 4px 4px 4px 0; + padding: 0 0 0 4px; + position: relative; + overflow-x: hidden; + overflow-y: auto; + max-height: 200px; +} +.select2-container .select2-results li { + line-height: 80%; + padding: 7px 7px 8px; + margin: 0; + list-style: none; + cursor: pointer; + display: list-item; +} + +.select2-container .select2-results .select2-highlighted { + background: #3875d7; + color: #fff; +} +.select2-container .select2-results li em { + background: #feffde; + font-style: normal; +} +.select2-container .select2-results .select2-highlighted em { + background: 
transparent; +} +.select2-container .select2-results .select2-no-results { + background: #f4f4f4; + display: list-item; +} + +/* +disabled look for already selected choices in the results dropdown +.select2-container .select2-results .select2-disabled.select2-highlighted { + color: #666; + background: #f4f4f4; + display: list-item; + cursor: default; +} +.select2-container .select2-results .select2-disabled { + background: #f4f4f4; + display: list-item; + cursor: default; +} +*/ +.select2-container .select2-results .select2-disabled { + display: none; +} + +.select2-more-results.select2-active { + background: #f4f4f4 url('spinner.gif') no-repeat 100%; +} + +.select2-more-results { + background: #f4f4f4; + display: list-item; +} + +/* multiselect */ + +.select2-container-multi .select2-choices { + background-color: #fff; + background-image: -webkit-gradient(linear, 0% 0%, 0% 100%, color-stop(1%, #eeeeee), color-stop(15%, #ffffff)); + background-image: -webkit-linear-gradient(top, #eeeeee 1%, #ffffff 15%); + background-image: -moz-linear-gradient(top, #eeeeee 1%, #ffffff 15%); + background-image: -o-linear-gradient(top, #eeeeee 1%, #ffffff 15%); + background-image: -ms-linear-gradient(top, #eeeeee 1%, #ffffff 15%); + background-image: linear-gradient(top, #eeeeee 1%, #ffffff 15%); + border: 1px solid #aaa; + margin: 0; + padding: 0; + cursor: text; + overflow: hidden; + height: auto !important; + height: 1%; + position: relative; +} + +.select2-container-multi .select2-drop { + margin-top:0; +} + +.select2-container-multi.select2-container-active .select2-choices { + -webkit-box-shadow: 0 0 5px rgba(0,0,0,.3); + -moz-box-shadow : 0 0 5px rgba(0,0,0,.3); + -o-box-shadow : 0 0 5px rgba(0,0,0,.3); + box-shadow : 0 0 5px rgba(0,0,0,.3); + border: 1px solid #5897fb; + outline: none; +} +.select2-container-multi .select2-choices li { + float: left; + list-style: none; +} +.select2-container-multi .select2-choices .select2-search-field { + white-space: nowrap; + margin: 0; 
+ padding: 0; +} + +.select2-container-multi .select2-choices .select2-search-field input { + color: #666; + background: transparent !important; + font-family: sans-serif; + font-size: 100%; + height: 15px; + padding: 5px; + margin: 1px 0; + outline: 0; + border: 0; + -webkit-box-shadow: none; + -moz-box-shadow : none; + -o-box-shadow : none; + box-shadow : none; +} + + +.select2-default { + color: #999 !important; +} + +.select2-container-multi .select2-choices .select2-search-choice { + -webkit-border-radius: 3px; + -moz-border-radius : 3px; + border-radius : 3px; + -moz-background-clip : padding; + -webkit-background-clip: padding-box; + background-clip : padding-box; + background-color: #e4e4e4; + filter: progid:DXImageTransform.Microsoft.gradient( startColorstr='#f4f4f4', endColorstr='#eeeeee', GradientType=0 ); + background-image: -webkit-gradient(linear, 0% 0%, 0% 100%, color-stop(20%, #f4f4f4), color-stop(50%, #f0f0f0), color-stop(52%, #e8e8e8), color-stop(100%, #eeeeee)); + background-image: -webkit-linear-gradient(top, #f4f4f4 20%, #f0f0f0 50%, #e8e8e8 52%, #eeeeee 100%); + background-image: -moz-linear-gradient(top, #f4f4f4 20%, #f0f0f0 50%, #e8e8e8 52%, #eeeeee 100%); + background-image: -o-linear-gradient(top, #f4f4f4 20%, #f0f0f0 50%, #e8e8e8 52%, #eeeeee 100%); + background-image: -ms-linear-gradient(top, #f4f4f4 20%, #f0f0f0 50%, #e8e8e8 52%, #eeeeee 100%); + background-image: linear-gradient(top, #f4f4f4 20%, #f0f0f0 50%, #e8e8e8 52%, #eeeeee 100%); + -webkit-box-shadow: 0 0 2px #ffffff inset, 0 1px 0 rgba(0,0,0,0.05); + -moz-box-shadow : 0 0 2px #ffffff inset, 0 1px 0 rgba(0,0,0,0.05); + box-shadow : 0 0 2px #ffffff inset, 0 1px 0 rgba(0,0,0,0.05); + color: #333; + border: 1px solid #aaaaaa; + line-height: 13px; + padding: 3px 5px 3px 18px; + margin: 3px 0 3px 5px; + position: relative; + cursor: default; +} +.select2-container-multi .select2-choices .select2-search-choice span { + cursor: default; +} +.select2-container-multi .select2-choices 
.select2-search-choice-focus { + background: #d4d4d4; +} + +.select2-search-choice-close { + display: block; + position: absolute; + right: 3px; + top: 4px; + width: 12px; + height: 13px; + font-size: 1px; + background: url(select2.png) right top no-repeat; + outline: none; +} + +.select2-container-multi .select2-search-choice-close { + left: 3px; +} + + +.select2-container-multi .select2-choices .select2-search-choice .select2-search-choice-close:hover { + background-position: right -11px; +} +.select2-container-multi .select2-choices .select2-search-choice-focus .select2-search-choice-close { + background-position: right -11px; +} + + +.select2-container-multi .select2-results { + margin: -1px 0 0; + padding: 0; +} + +/* end multiselect */ diff --git a/src/dashboard/src/media/vendor/select2/select2.js b/src/dashboard/src/media/vendor/select2/select2.js new file mode 100644 index 0000000000..e5bc30a13a --- /dev/null +++ b/src/dashboard/src/media/vendor/select2/select2.js @@ -0,0 +1,1473 @@ +/* + Copyright 2012 Igor Vaynberg + + Version: 2.0 Timestamp: Wed, May 16, 2012 10:38:37 AM + + Licensed under the Apache License, Version 2.0 (the "License"); you may not use this work except in + compliance with the License. You may obtain a copy of the License in the LICENSE file, or at: + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software distributed under the License is + distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and limitations under the License. 
+ */ +(function ($, undefined) { + "use strict"; + /*global document, window, jQuery, console */ + + if (window.Select2 !== undefined) { + return; + } + + var KEY, AbstractSelect2, SingleSelect2, MultiSelect2; + + KEY = { + TAB: 9, + ENTER: 13, + ESC: 27, + SPACE: 32, + LEFT: 37, + UP: 38, + RIGHT: 39, + DOWN: 40, + SHIFT: 16, + CTRL: 17, + ALT: 18, + PAGE_UP: 33, + PAGE_DOWN: 34, + HOME: 36, + END: 35, + BACKSPACE: 8, + DELETE: 46, + isArrow: function (k) { + k = k.which ? k.which : k; + switch (k) { + case KEY.LEFT: + case KEY.RIGHT: + case KEY.UP: + case KEY.DOWN: + return true; + } + return false; + }, + isControl: function (k) { + k = k.which ? k.which : k; + switch (k) { + case KEY.SHIFT: + case KEY.CTRL: + case KEY.ALT: + return true; + } + return false; + }, + isFunctionKey: function (k) { + k = k.which ? k.which : k; + return k >= 112 && k <= 123; + } + }; + + function indexOf(value, array) { + var i = 0, l = array.length, v; + + if (value.constructor === String) { + for (; i < l; i = i + 1) if (value.localeCompare(array[i]) === 0) return i; + } else { + for (; i < l; i = i + 1) { + v = array[i]; + if (v.constructor === String) { + if (v.localeCompare(value) === 0) return i; + } else { + if (v === value) return i; + } + } + } + return -1; + } + + /** + * Compares equality of a and b taking into account that a and b may be strings, in which case localCompare is used + * @param a + * @param b + */ + function equal(a, b) { + if (a === b) return true; + if (a === undefined || b === undefined) return false; + if (a === null || b === null) return false; + if (a.constructor === String) return a.localeCompare(b) === 0; + if (b.constructor === String) return b.localeCompare(a) === 0; + return false; + } + + /** + * Splits the string into an array of values, trimming each value. 
An empty array is returned for nulls or empty + * strings + * @param string + * @param separator + */ + function splitVal(string, separator) { + var val, i, l; + if (string === null || string.length < 1) return []; + val = string.split(separator); + for (i = 0, l = val.length; i < l; i = i + 1) val[i] = $.trim(val[i]); + return val; + } + + function getSideBorderPadding(element) { + return element.outerWidth() - element.width(); + } + + function installKeyUpChangeEvent(element) { + element.bind("keydown", function () { + element.data("keyup-change-value", element.val()); + }); + element.bind("keyup", function () { + if (element.val() !== element.data("keyup-change-value")) { + element.trigger("keyup-change"); + } + }); + } + + /** + * filters mouse events so an event is fired only if the mouse moved. + * + * filters out mouse events that occur when mouse is stationary but + * the elements under the pointer are scrolled. + */ + $(document).delegate("*", "mousemove", function (e) { + $(document).data("select2-lastpos", {x: e.pageX, y: e.pageY}); + }); + function installFilteredMouseMove(element) { + element.bind("mousemove", function (e) { + var lastpos = $(document).data("select2-lastpos"); + if (lastpos === undefined || lastpos.x !== e.pageX || lastpos.y !== e.pageY) { + $(e.target).trigger("mousemove-filtered", e); + } + }); + } + + /** + * Debounces a function. Returns a function that calls the original fn function only if no invocations have been made + * within the last quietMillis milliseconds. 
+ * + * @param quietMillis number of milliseconds to wait before invoking fn + * @param fn function to be debounced + * @return debounced version of fn + */ + function debounce(quietMillis, fn) { + var timeout; + return function () { + window.clearTimeout(timeout); + timeout = window.setTimeout(fn, quietMillis); + }; + } + + function installDebouncedScroll(threshold, element) { + var notify = debounce(threshold, function (e) { element.trigger("scroll-debounced", e);}); + element.bind("scroll", function (e) { + if (indexOf(e.target, element.get()) >= 0) notify(e); + }); + } + + function killEvent(event) { + event.preventDefault(); + event.stopPropagation(); + } + + function measureTextWidth(e) { + var sizer, width; + sizer = $("
    ").css({ + position: "absolute", + left: "-1000px", + top: "-1000px", + display: "none", + fontSize: e.css("fontSize"), + fontFamily: e.css("fontFamily"), + fontStyle: e.css("fontStyle"), + fontWeight: e.css("fontWeight"), + letterSpacing: e.css("letterSpacing"), + textTransform: e.css("textTransform"), + whiteSpace: "nowrap" + }); + sizer.text(e.val()); + $("body").append(sizer); + width = sizer.width(); + sizer.remove(); + return width; + } + + /** + * Produces an ajax-based query function + * + * @param options object containing configuration paramters + * @param options.transport function that will be used to execute the ajax request. must be compatible with parameters supported by $.ajax + * @param options.url url for the data + * @param options.data a function(searchTerm, pageNumber) that should return an object containing query string parameters for the above url. + * @param options.dataType request data type: ajax, jsonp, other datatatypes supported by jQuery's $.ajax function or the transport function if specified + * @param options.quietMillis (optional) milliseconds to wait before making the ajaxRequest, helps debounce the ajax function if invoked too often + * @param options.results a function(remoteData, pageNumber) that converts data returned form the remote request to the format expected by Select2. 
+ * The expected format is an object containing the following keys: + * results array of objects that will be used as choices + * more (optional) boolean indicating whether there are more results available + * Example: {results:[{id:1, text:'Red'},{id:2, text:'Blue'}], more:true} + */ + function ajax(options) { + var timeout, // current scheduled but not yet executed request + requestSequence = 0, // sequence used to drop out-of-order responses + handler = null, + quietMillis = options.quietMillis || 100; + + return function (query) { + window.clearTimeout(timeout); + timeout = window.setTimeout(function () { + requestSequence += 1; // increment the sequence + var requestNumber = requestSequence, // this request's sequence number + data = options.data, // ajax data function + transport = options.transport || $.ajax; + + data = data.call(this, query.term, query.page); + + if( null !== handler){ + handler.abort(); + } + handler = transport.call(null, { + url: options.url, + dataType: options.dataType, + data: data, + success: function (data) { + if (requestNumber < requestSequence) { + return; + } + // TODO 3.0 - replace query.page with query so users have access to term, page, etc. + query.callback(options.results(data, query.page)); + } + }); + }, quietMillis); + }; + } + + /** + * Produces a query function that works with a local array + * + * @param options object containing configuration parameters. The options parameter can either be an array or an + * object. + * + * If the array form is used it is assumed that it contains objects with 'id' and 'text' keys. + * + * If the object form is used ti is assumed that it contains 'data' and 'text' keys. The 'data' key should contain + * an array of objects that will be used as choices. These objects must contain at least an 'id' key. The 'text' + * key can either be a String in which case it is expected that each element in the 'data' array has a key with the + * value of 'text' which will be used to match choices. 
Alternatively, text can be a function(item) that can extract + * the text. + */ + function local(options) { + var data = options, // data elements + text = function (item) { return item.text; }; // function used to retrieve the text portion of a data item that is matched against the search + + if (!$.isArray(data)) { + text = data.text; + // if text is not a function we assume it to be a key name + if (!$.isFunction(text)) text = function (item) { return item[data.text]; }; + data = data.results; + } + + return function (query) { + var t = query.term.toUpperCase(), filtered = {}; + if (t === "") { + query.callback({results: data}); + return; + } + filtered.results = $(data) + .filter(function () {return text(this).toUpperCase().indexOf(t) >= 0;}) + .get(); + query.callback(filtered); + }; + } + + // TODO javadoc + function tags(data) { + // TODO even for a function we should probably return a wrapper that does the same object/string check as + // the function for arrays. otherwise only functions that return objects are supported. + if ($.isFunction(data)) { + return data; + } + + // if not a function we assume it to be an array + + return function (query) { + var t = query.term.toUpperCase(), filtered = {results: []}; + $(data).each(function () { + var isObject = this.text !== undefined, + text = isObject ? this.text : this; + if (t === "" || text.toUpperCase().indexOf(t) >= 0) { + filtered.results.push(isObject ? 
this : {id: this, text: this}); + } + }); + query.callback(filtered); + }; + } + + /** + * blurs any Select2 container that has focus when an element outside them was clicked or received focus + */ + $(document).ready(function () { + $(document).delegate("*", "mousedown focusin", function (e) { + var target = $(e.target).closest("div.select2-container").get(0); + $(document).find("div.select2-container-active").each(function () { + if (this !== target) $(this).data("select2").blur(); + }); + }); + }); + + /** + * Creates a new class + * + * @param superClass + * @param methods + */ + function clazz(SuperClass, methods) { + var constructor = function () {}; + constructor.prototype = new SuperClass; + constructor.prototype.constructor = constructor; + constructor.prototype.parent = SuperClass.prototype; + constructor.prototype = $.extend(constructor.prototype, methods); + return constructor; + } + + AbstractSelect2 = clazz(Object, { + + bind: function (func) { + var self = this; + return function () { + func.apply(self, arguments); + }; + }, + + init: function (opts) { + var results, search, resultsSelector = ".select2-results"; + + // prepare options + this.opts = opts = this.prepareOpts(opts); + + this.id=opts.id; + + // destroy if called on an existing component + if (opts.element.data("select2") !== undefined) { + this.destroy(); + } + + this.container = this.createContainer(); + + if (opts.element.attr("class") !== undefined) { + this.container.addClass(opts.element.attr("class")); + } + + // swap container for the element + this.opts.element + .data("select2", this) + .hide() + .after(this.container); + this.container.data("select2", this); + + this.dropdown = this.container.find(".select2-drop"); + this.results = results = this.container.find(resultsSelector); + this.search = search = this.container.find("input[type=text]"); + + this.resultsPage = 0; + + // initialize the container + this.initContainer(); + + installFilteredMouseMove(this.results); + 
this.container.delegate(resultsSelector, "mousemove-filtered", this.bind(this.highlightUnderEvent)); + + installDebouncedScroll(80, this.results); + this.container.delegate(resultsSelector, "scroll-debounced", this.bind(this.loadMoreIfNeeded)); + + // if jquery.mousewheel plugin is installed we can prevent out-of-bounds scrolling of results via mousewheel + if ($.fn.mousewheel) { + results.mousewheel(function (e, delta, deltaX, deltaY) { + var top = results.scrollTop(), height; + if (deltaY > 0 && top - deltaY <= 0) { + results.scrollTop(0); + killEvent(e); + } else if (deltaY < 0 && results.get(0).scrollHeight - results.scrollTop() + deltaY <= results.height()) { + results.scrollTop(results.get(0).scrollHeight - results.height()); + killEvent(e); + } + }); + } + + installKeyUpChangeEvent(search); + search.bind("keyup-change", this.bind(this.updateResults)); + search.bind("focus", function () { search.addClass("select2-focused");}); + search.bind("blur", function () { search.removeClass("select2-focused");}); + + this.container.delegate(resultsSelector, "click", this.bind(function (e) { + if ($(e.target).closest(".select2-result:not(.select2-disabled)").length > 0) { + this.highlightUnderEvent(e); + this.selectHighlighted(e); + } else { + killEvent(e); + this.focusSearch(); + } + })); + + if ($.isFunction(this.opts.initSelection)) { + // initialize selection based on the current value of the source element + this.initSelection(); + + // if the user has provided a function that can set selection based on the value of the source element + // we monitor the change event on the element and trigger it, allowing for two way synchronization + this.monitorSource(); + } + }, + + destroy: function () { + var select2 = this.opts.element.data("select2"); + if (select2 !== undefined) { + select2.container.remove(); + select2.opts.element + .removeData("select2") + .unbind(".select2") + .show(); + } + }, + + prepareOpts: function (opts) { + var element, select, idKey; + + 
element = opts.element; + + if (element.get(0).tagName.toLowerCase() === "select") { + this.select = select = opts.element; + } + + if (select) { + // these options are not allowed when attached to a select because they are picked up off the element itself + $.each(["id", "multiple", "ajax", "query", "createSearchChoice", "initSelection", "data", "tags"], function () { + if (this in opts) { + throw new Error("Option '" + this + "' is not allowed for Select2 when attached to a " , + "
    " , + "
      " , + "
    " , + ""].join("")); + }, + + open: function () { + + if (this.opened()) return; + + this.parent.open.apply(this, arguments); + + }, + + close: function () { + if (!this.opened()) return; + this.parent.close.apply(this, arguments); + }, + + focus: function () { + this.close(); + this.selection.focus(); + }, + + isFocused: function () { + return this.selection.is(":focus"); + }, + + cancel: function () { + this.parent.cancel.apply(this, arguments); + this.selection.focus(); + }, + + initContainer: function () { + + var selection, container = this.container, clickingInside = false, + selector = ".select2-choice"; + + this.selection = selection = container.find(selector); + + this.search.bind("keydown", this.bind(function (e) { + switch (e.which) { + case KEY.UP: + case KEY.DOWN: + this.moveHighlight((e.which === KEY.UP) ? -1 : 1); + killEvent(e); + return; + case KEY.TAB: + case KEY.ENTER: + this.selectHighlighted(); + killEvent(e); + return; + case KEY.ESC: + this.cancel(e); + e.preventDefault(); + return; + } + })); + + container.delegate(selector, "click", this.bind(function (e) { + clickingInside = true; + + if (this.opened()) { + this.close(); + selection.focus(); + } else { + this.open(); + } + e.preventDefault(); + + clickingInside = false; + })); + container.delegate(selector, "keydown", this.bind(function (e) { + if (e.which === KEY.TAB || KEY.isControl(e) || KEY.isFunctionKey(e) || e.which === KEY.ESC) { + return; + } + this.open(); + if (e.which === KEY.PAGE_UP || e.which === KEY.PAGE_DOWN || e.which === KEY.SPACE) { + // prevent the page from scrolling + killEvent(e); + } + if (e.which === KEY.ENTER) { + // do not propagate the event otherwise we open, and propagate enter which closes + killEvent(e); + } + })); + container.delegate(selector, "focus", function () { container.addClass("select2-container-active"); }); + container.delegate(selector, "blur", this.bind(function () { + if (clickingInside) return; + if (!this.opened()) this.blur(); + })); + + 
selection.delegate("abbr", "click", this.bind(function (e) { + this.val(""); + killEvent(e); + this.close(); + this.triggerChange(); + })); + + this.setPlaceholder(); + }, + + /** + * Sets selection based on source element's value + */ + initSelection: function () { + var selected; + if (this.opts.element.val() === "") { + this.updateSelection({id: "", text: ""}); + } else { + selected = this.opts.initSelection.call(null, this.opts.element); + if (selected !== undefined && selected !== null) { + this.updateSelection(selected); + } + } + + this.close(); + this.setPlaceholder(); + }, + + prepareOpts: function () { + var opts = this.parent.prepareOpts.apply(this, arguments); + + if (opts.element.get(0).tagName.toLowerCase() === "select") { + // install sthe selection initializer + opts.initSelection = function (element) { + var selected = element.find(":selected"); + // a single select box always has a value, no need to null check 'selected' + return {id: selected.attr("value"), text: selected.text()}; + }; + } + + return opts; + }, + + setPlaceholder: function () { + var placeholder = this.getPlaceholder(); + + if (this.opts.element.val() === "" && placeholder !== undefined) { + + // check for a first blank option if attached to a select + if (this.select && this.select.find("option:first").text() !== "") return; + + if (typeof(placeholder) === "object") { + this.updateSelection(placeholder); + } else { + this.selection.find("span").html(placeholder); + } + this.selection.addClass("select2-default"); + + this.selection.find("abbr").hide(); + } + }, + + postprocessResults: function (data, initial) { + var selected = 0, self = this, showSearchInput = true; + + // find the selected element in the result list + + this.results.find(".select2-result").each(function (i) { + if (equal(self.id($(this).data("select2-data")), self.opts.element.val())) { + selected = i; + return false; + } + }); + + // and highlight it + + this.highlight(selected); + + // hide the search box if 
this is the first we got the results and there are a few of them + + if (initial === true) { + showSearchInput = data.results.length >= this.opts.minimumResultsForSearch; + this.search.parent().toggle(showSearchInput); + + //add "select2-with-searchbox" to the container if search box is shown + this.container[showSearchInput ? "addClass" : "removeClass"]("select2-with-searchbox"); + } + + }, + + onSelect: function (data) { + var old = this.opts.element.val(); + + this.opts.element.val(this.id(data)); + this.updateSelection(data); + this.close(); + this.selection.focus(); + + if (!equal(old, this.id(data))) { this.triggerChange(); } + }, + + updateSelection: function (data) { + this.selection + .find("span") + .html(this.opts.formatSelection(data)); + + this.selection.removeClass("select2-default"); + + if (this.opts.allowClear && this.getPlaceholder() !== undefined) { + this.selection.find("abbr").show(); + } + }, + + val: function () { + var val, data = null; + + if (arguments.length === 0) { + return this.opts.element.val(); + } + + val = arguments[0]; + + if (this.select) { + // val is an id + this.select + .val(val) + .find(":selected").each(function () { + data = {id: $(this).attr("value"), text: $(this).text()}; + return false; + }); + this.updateSelection(data); + } else { + // val is an object. !val is true for [undefined,null,''] + this.opts.element.val(!val ? "" : this.id(val)); + this.updateSelection(val); + } + this.setPlaceholder(); + + }, + + clearSearch: function () { + this.search.val(""); + } + }); + + MultiSelect2 = clazz(AbstractSelect2, { + + createContainer: function () { + return $("
    ", { + "class": "select2-container select2-container-multi", + "style": "width: " + this.getContainerWidth() + }).html([ + "
      ", + //"
    • California
    • " , + "
    • " , + " " , + "
    • " , + "
    " , + ""].join("")); + }, + + prepareOpts: function () { + var opts = this.parent.prepareOpts.apply(this, arguments); + + opts = $.extend({}, { + closeOnSelect: true + }, opts); + + // TODO validate placeholder is a string if specified + + if (opts.element.get(0).tagName.toLowerCase() === "select") { + // install sthe selection initializer + opts.initSelection = function (element) { + var data = []; + element.find(":selected").each(function () { + data.push({id: $(this).attr("value"), text: $(this).text()}); + }); + return data; + }; + } + + return opts; + }, + + initContainer: function () { + + var selector = ".select2-choices", selection; + + this.searchContainer = this.container.find(".select2-search-field"); + this.selection = selection = this.container.find(selector); + + this.search.bind("keydown", this.bind(function (e) { + if (e.which === KEY.BACKSPACE && this.search.val() === "") { + this.close(); + + var choices, + selected = selection.find(".select2-search-choice-focus"); + if (selected.length > 0) { + this.unselect(selected.first()); + this.search.width(10); + killEvent(e); + return; + } + + choices = selection.find(".select2-search-choice"); + if (choices.length > 0) { + choices.last().addClass("select2-search-choice-focus"); + } + } else { + selection.find(".select2-search-choice-focus").removeClass("select2-search-choice-focus"); + } + + if (this.opened()) { + switch (e.which) { + case KEY.UP: + case KEY.DOWN: + this.moveHighlight((e.which === KEY.UP) ? 
-1 : 1); + killEvent(e); + return; + case KEY.ENTER: + this.selectHighlighted(); + killEvent(e); + return; + case KEY.ESC: + this.cancel(e); + e.preventDefault(); + return; + } + } + + if (e.which === KEY.TAB || KEY.isControl(e) || KEY.isFunctionKey(e) || e.which === KEY.BACKSPACE || e.which === KEY.ESC) { + return; + } + + this.open(); + + if (e.which === KEY.PAGE_UP || e.which === KEY.PAGE_DOWN) { + // prevent the page from scrolling + killEvent(e); + } + })); + + this.search.bind("keyup", this.bind(this.resizeSearch)); + + this.container.delegate(selector, "click", this.bind(function (e) { + this.open(); + this.focusSearch(); + e.preventDefault(); + })); + + this.container.delegate(selector, "focus", this.bind(function () { + this.container.addClass("select2-container-active"); + this.clearPlaceholder(); + })); + + // set the placeholder if necessary + this.clearSearch(); + }, + + initSelection: function () { + var data; + if (this.opts.element.val() === "") { + this.updateSelection([]); + } + if (this.select || this.opts.element.val() !== "") { + data = this.opts.initSelection.call(null, this.opts.element); + if (data !== undefined && data !== null) { + this.updateSelection(data); + } + } + + this.close(); + + // set the placeholder if necessary + this.clearSearch(); + }, + + clearSearch: function () { + var placeholder = this.getPlaceholder(); + + if (placeholder !== undefined + && this.getVal().length === 0 + && this.search.hasClass("select2-focused") === false) { + + this.search.val(placeholder).addClass("select2-default"); + // stretch the search box to full width of the container so as much of the placeholder is visible as possible + this.search.width(this.getContainerWidth()); + } else { + this.search.val("").width(10); + } + }, + + clearPlaceholder: function () { + if (this.search.hasClass("select2-default")) { + this.search.val("").removeClass("select2-default"); + } + }, + + open: function () { + if (this.opened()) return; + 
this.parent.open.apply(this, arguments); + this.resizeSearch(); + this.focusSearch(); + }, + + close: function () { + if (!this.opened()) return; + this.parent.close.apply(this, arguments); + }, + + focus: function () { + this.close(); + this.search.focus(); + }, + + isFocused: function () { + return this.search.hasClass("select2-focused"); + }, + + updateSelection: function (data) { + var ids = [], filtered = [], self = this; + + // filter out duplicates + $(data).each(function () { + if (indexOf(self.id(this), ids) < 0) { + ids.push(self.id(this)); + filtered.push(this); + } + }); + data = filtered; + + this.selection.find(".select2-search-choice").remove(); + $(data).each(function () { + self.addSelectedChoice(this); + }); + self.postprocessResults(); + }, + + onSelect: function (data) { + this.addSelectedChoice(data); + if (this.select) { this.postprocessResults(); } + + if (this.opts.closeOnSelect) { + this.close(); + this.search.width(10); + } else { + this.search.width(10); + this.resizeSearch(); + } + + // since its not possible to select an element that has already been + // added we do not need to check if this is a new element before firing change + this.triggerChange(); + + this.focusSearch(); + }, + + cancel: function () { + this.close(); + this.focusSearch(); + }, + + addSelectedChoice: function (data) { + var choice, + id = this.id(data), + parts, + val = this.getVal(); + + parts = ["
  • ", + this.opts.formatSelection(data), + "", + "
  • " + ]; + + choice = $(parts.join("")); + choice.find("a") + .bind("click dblclick", this.bind(function (e) { + this.unselect($(e.target)); + this.selection.find(".select2-search-choice-focus").removeClass("select2-search-choice-focus"); + killEvent(e); + this.close(); + this.focusSearch(); + })).bind("focus", this.bind(function () { + this.container.addClass("select2-container-active"); + })); + + choice.data("select2-data", data); + choice.insertBefore(this.searchContainer); + + val.push(id); + this.setVal(val); + }, + + unselect: function (selected) { + var val = this.getVal(), + index; + + selected = selected.closest(".select2-search-choice"); + + if (selected.length === 0) { + throw "Invalid argument: " + selected + ". Must be .select2-search-choice"; + } + + index = indexOf(this.id(selected.data("select2-data")), val); + + if (index >= 0) { + val.splice(index, 1); + this.setVal(val); + if (this.select) this.postprocessResults(); + } + selected.remove(); + this.triggerChange(); + }, + + postprocessResults: function () { + var val = this.getVal(), + choices = this.results.find(".select2-result"), + self = this; + + choices.each(function () { + var choice = $(this), id = self.id(choice.data("select2-data")); + if (indexOf(id, val) >= 0) { + choice.addClass("select2-disabled"); + } else { + choice.removeClass("select2-disabled"); + } + }); + + choices.each(function (i) { + if (!$(this).hasClass("select2-disabled")) { + self.highlight(i); + return false; + } + }); + + }, + + resizeSearch: function () { + + var minimumWidth, left, maxWidth, containerLeft, searchWidth; + + minimumWidth = measureTextWidth(this.search) + 10; + + left = this.search.offset().left; + + maxWidth = this.selection.width(); + containerLeft = this.selection.offset().left; + + searchWidth = maxWidth - (left - containerLeft) - getSideBorderPadding(this.search); + + if (searchWidth < minimumWidth) { + searchWidth = maxWidth - getSideBorderPadding(this.search); + } + + if (searchWidth < 40) { 
+ searchWidth = maxWidth - getSideBorderPadding(this.search); + } + this.search.width(searchWidth); + }, + + getVal: function () { + var val; + if (this.select) { + val = this.select.val(); + return val === null ? [] : val; + } else { + val = this.opts.element.val(); + return splitVal(val, ","); + } + }, + + setVal: function (val) { + var unique = []; + if (this.select) { + this.select.val(val); + } else { + // filter out duplicates + $(val).each(function () { + if (indexOf(this, unique) < 0) unique.push(this); + }); + + this.opts.element.val(unique.length === 0 ? "" : unique.join(",")); + } + }, + + val: function () { + var val, data = [], self=this; + + if (arguments.length === 0) { + return this.getVal(); + } + + val = arguments[0]; + + if (this.select) { + // val is a list of ids + this.setVal(val); + this.select.find(":selected").each(function () { + data.push({id: $(this).attr("value"), text: $(this).text()}); + }); + this.updateSelection(data); + } else { + val = (val === null) ? [] : val; + this.setVal(val); + // val is a list of objects + + $(val).each(function () { data.push(self.id(this)); }); + this.setVal(data); + this.updateSelection(val); + } + + this.clearSearch(); + } + }); + + $.fn.select2 = function () { + + var args = Array.prototype.slice.call(arguments, 0), + opts, + select2, + value, multiple, allowedMethods = ["val", "destroy", "open", "close", "focus", "isFocused"]; + + this.each(function () { + if (args.length === 0 || typeof(args[0]) === "object") { + opts = args.length === 0 ? {} : $.extend({}, args[0]); + opts.element = $(this); + + if (opts.element.get(0).tagName.toLowerCase() === "select") { + multiple = opts.element.attr("multiple"); + } else { + multiple = opts.multiple || false; + if ("tags" in opts) {opts.multiple = multiple = true;} + } + + select2 = multiple ? 
new MultiSelect2() : new SingleSelect2(); + select2.init(opts); + } else if (typeof(args[0]) === "string") { + + if (indexOf(args[0], allowedMethods) < 0) { + throw "Unknown method: " + args[0]; + } + + value = undefined; + select2 = $(this).data("select2"); + if (select2 === undefined) return; + value = select2[args[0]].apply(select2, args.slice(1)); + if (value !== undefined) {return false;} + } else { + throw "Invalid arguments to select2 plugin: " + args; + } + }); + return (value === undefined) ? this : value; + }; + + // exports + window.Select2 = { + query: { + ajax: ajax, + local: local, + tags: tags + }, util: { + debounce: debounce + }, "class": { + "abstract": AbstractSelect2, + "single": SingleSelect2, + "multi": MultiSelect2 + } + }; + +}(jQuery)); diff --git a/src/dashboard/src/media/vendor/select2/select2.min.js b/src/dashboard/src/media/vendor/select2/select2.min.js new file mode 100644 index 0000000000..e240bb5c67 --- /dev/null +++ b/src/dashboard/src/media/vendor/select2/select2.min.js @@ -0,0 +1,55 @@ +/* +Copyright 2012 Igor Vaynberg + +Version: 2.0 Timestamp: Wed, May 16, 2012 10:38:37 AM + +Licensed under the Apache License, Version 2.0 (the "License"); you may not use this work except in +compliance with the License. You may obtain a copy of the License in the LICENSE file, or at: + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software distributed under the License is +distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and limitations under the License.*/ +(function(e,g){function j(a,b){var c=0,f=b.length,h;if(a.constructor===String)for(;ca.length)return[];c=a.split(b);f=0;for(h=c.length;f< +h;f+=1)c[f]=e.trim(c[f]);return c}function w(a){a.bind("keydown",function(){a.data("keyup-change-value",a.val())});a.bind("keyup",function(){a.val()!==a.data("keyup-change-value")&&a.trigger("keyup-change")})}function x(a){a.bind("mousemove",function(a){var c=e(document).data("select2-lastpos");(c===g||c.x!==a.pageX||c.y!==a.pageY)&&e(a.target).trigger("mousemove-filtered",a)})}function s(a,b){var c;return function(){window.clearTimeout(c);c=window.setTimeout(b,a)}}function y(a,b){var c=s(a,function(a){b.trigger("scroll-debounced", +a)});b.bind("scroll",function(a){0<=j(a.target,b.get())&&c(a)})}function i(a){a.preventDefault();a.stopPropagation()}function t(a){var b,c=0,f=null,h=a.quietMillis||100;return function(d){window.clearTimeout(b);b=window.setTimeout(function(){var b=c+=1,h=a.data,g=a.transport||e.ajax,h=h.call(this,d.term,d.page);null!==f&&f.abort();f=g.call(null,{url:a.url,dataType:a.dataType,data:h,success:function(f){b=0}).get();a.callback(d)}}}function v(a){return e.isFunction(a)?a:function(b){var c=b.term.toUpperCase(),f={results:[]};e(a).each(function(){var a=this.text!==g,b=a?this.text:this;if(""===c||0<=b.toUpperCase().indexOf(c))f.results.push(a?this:{id:this, +text:this})});b.callback(f)}}function o(a,b){var c=function(){};c.prototype=new a;c.prototype.constructor=c;c.prototype.parent=a.prototype;c.prototype=e.extend(c.prototype,b);return c}if(window.Select2===g){var d,m,p,q;d={TAB:9,ENTER:13,ESC:27,SPACE:32,LEFT:37,UP:38,RIGHT:39,DOWN:40,SHIFT:16,CTRL:17,ALT:18,PAGE_UP:33,PAGE_DOWN:34,HOME:36,END:35,BACKSPACE:8,DELETE:46,isArrow:function(a){a=a.which?a.which:a;switch(a){case d.LEFT:case d.RIGHT:case d.UP:case d.DOWN:return!0}return!1},isControl:function(a){a= +a.which?a.which:a;switch(a){case d.SHIFT:case 
d.CTRL:case d.ALT:return!0}return!1},isFunctionKey:function(a){a=a.which?a.which:a;return 112<=a&&123>=a}};e(document).delegate("*","mousemove",function(a){e(document).data("select2-lastpos",{x:a.pageX,y:a.pageY})});e(document).ready(function(){e(document).delegate("*","mousedown focusin",function(a){var b=e(a.target).closest("div.select2-container").get(0);e(document).find("div.select2-container-active").each(function(){this!==b&&e(this).data("select2").blur()})})}); +m=o(Object,{bind:function(a){var b=this;return function(){a.apply(b,arguments)}},init:function(a){var b,c;this.opts=a=this.prepareOpts(a);this.id=a.id;a.element.data("select2")!==g&&this.destroy();this.container=this.createContainer();a.element.attr("class")!==g&&this.container.addClass(a.element.attr("class"));this.opts.element.data("select2",this).hide().after(this.container);this.container.data("select2",this);this.dropdown=this.container.find(".select2-drop");this.results=b=this.container.find(".select2-results"); +this.search=c=this.container.find("input[type=text]");this.resultsPage=0;this.initContainer();x(this.results);this.container.delegate(".select2-results","mousemove-filtered",this.bind(this.highlightUnderEvent));y(80,this.results);this.container.delegate(".select2-results","scroll-debounced",this.bind(this.loadMoreIfNeeded));e.fn.mousewheel&&b.mousewheel(function(a,c,e,d){c=b.scrollTop();0=c-d?(b.scrollTop(0),i(a)):0>d&&b.get(0).scrollHeight-b.scrollTop()+d<=b.height()&&(b.scrollTop(b.get(0).scrollHeight- +b.height()),i(a))});w(c);c.bind("keyup-change",this.bind(this.updateResults));c.bind("focus",function(){c.addClass("select2-focused")});c.bind("blur",function(){c.removeClass("select2-focused")});this.container.delegate(".select2-results","click",this.bind(function(a){0 element.");});a=e.extend({},{formatResult:function(a){return a.text}, +formatSelection:function(a){return a.text},formatNoMatches:function(){return"No matches found"},formatInputTooShort:function(a,b){return"Please 
enter "+(b-a.length)+" more characters"},minimumResultsForSearch:0,minimumInputLength:0,id:function(a){return a.id}},a);"function"!==typeof a.id&&(f=a.id,a.id=function(a){return a[f]});c?(a.query=this.bind(function(a){var c={results:[],more:false},f=a.term.toUpperCase(),d=this.getPlaceholder();b.find("option").each(function(a){var b=e(this),h=b.text();if(a=== +0&&d!==g&&h==="")return true;h.toUpperCase().indexOf(f)>=0&&c.results.push({id:b.attr("value"),text:h})});a.callback(c)}),a.id=function(a){return a.id}):"query"in a||("ajax"in a?a.query=t(a.ajax):"data"in a?a.query=u(a.data):"tags"in a&&(a.query=v(a.tags),a.createSearchChoice=function(a){return{id:a,text:a}},a.initSelection=function(a){var b=[];e(r(a.val(),",")).each(function(){b.push({id:this,text:this})});return b}));if("function"!==typeof a.query)throw"query function not defined for Select2 "+ +a.element.attr("id");return a},monitorSource:function(){this.opts.element.bind("change.select2",this.bind(function(){!0!==this.opts.element.data("select2-change-triggered")&&this.initSelection()}))},triggerChange:function(){this.opts.element.data("select2-change-triggered",!0);this.opts.element.trigger("change");this.opts.element.data("select2-change-triggered",!1)},opened:function(){return this.container.hasClass("select2-dropdown-open")},open:function(){this.opened()||(this.container.addClass("select2-dropdown-open").addClass("select2-container-active"), +this.updateResults(!0),this.dropdown.show(),this.focusSearch())},close:function(){this.opened()&&(this.dropdown.hide(),this.container.removeClass("select2-dropdown-open"),this.results.empty(),this.clearSearch())},clearSearch:function(){},ensureHighlightVisible:function(){var 
a=this.results,b,c,f,d;b=a.children(".select2-result");c=this.highlight();0>c||(f=e(b[c]),d=f.offset().top+f.outerHeight(),c===b.length-1&&(b=a.find("li.select2-more-results"),0b&&a.scrollTop(a.scrollTop()+(d-b)),f=f.offset().top-a.offset().top,0>f&&a.scrollTop(a.scrollTop()+f))},moveHighlight:function(a){for(var b=this.results.children(".select2-result"),c=this.highlight();-1=b.length&&(a=b.length-1);0>a&&(a=0);e(b[a]).addClass("select2-highlighted");this.ensureHighlightVisible();this.opened()&&this.focusSearch()},highlightUnderEvent:function(a){a=e(a.target).closest(".select2-result");0=c&&(b.addClass("select2-active"),this.opts.query({term:this.search.val(), +page:d,callback:this.bind(function(c){var k=[],i=this;e(c.results).each(function(){k.push("
  • ");k.push(i.opts.formatResult(this));k.push("
  • ")});b.before(k.join(""));a.find(".select2-result").each(function(a){var b=e(this);b.data("select2-data")!==g?f=a:b.data("select2-data",c.results[a-f-1])});c.more?b.removeClass("select2-active"):b.remove();this.resultsPage=d})})))},updateResults:function(a){function b(a){f.html(a);f.scrollTop(0);c.removeClass("select2-active")}var c= +this.search,f=this.results,d=this.opts,i=this;c.addClass("select2-active");c.val().length"+d.formatInputTooShort(c.val(),d.minimumInputLength)+""):(this.resultsPage=1,d.query({term:c.val(),page:this.resultsPage,callback:this.bind(function(k){var j=[],l;this.opts.createSearchChoice&&""!==c.val()&&(l=this.opts.createSearchChoice.call(null,c.val(),k.results),l!==g&&null!==l&&i.id(l)!==g&&null!==i.id(l)&&0===e(k.results).filter(function(){return n(i.id(this), +i.id(l))}).length&&k.results.unshift(l));0===k.results.length?b("
  • "+d.formatNoMatches(c.val())+"
  • "):(e(k.results).each(function(){j.push("
  • ");j.push(d.formatResult(this));j.push("
  • ")}),!0===k.more&&j.push("
  • Loading more results...
  • "),b(j.join("")),f.children(".select2-result").each(function(a){a=k.results[a];e(this).data("select2-data",a)}),this.postprocessResults(k,a))})}))},cancel:function(){this.close()}, +blur:function(){window.setTimeout(this.bind(function(){this.close();this.container.removeClass("select2-container-active");this.clearSearch();this.selection.find(".select2-search-choice-focus").removeClass("select2-search-choice-focus");this.search.blur()}),10)},focusSearch:function(){window.setTimeout(this.bind(function(){this.search.focus()}),10)},selectHighlighted:function(){var a=this.results.find(".select2-highlighted:not(.select2-disabled)").data("select2-data");if(a)this.onSelect(a)},getPlaceholder:function(){return this.opts.element.attr("placeholder")|| +this.opts.element.data("placeholder")||this.opts.placeholder},getContainerWidth:function(){var a,b,c,f;if(this.opts.width!==g)return this.opts.width;a=this.opts.element.attr("style");if(a!==g){a=a.split(";");c=0;for(f=a.length;c",{"class":"select2-container",style:"width: "+ +this.getContainerWidth()}).html("
    ")},open:function(){this.opened()||this.parent.open.apply(this,arguments)},close:function(){this.opened()&&this.parent.close.apply(this, +arguments)},focus:function(){this.close();this.selection.focus()},isFocused:function(){return this.selection.is(":focus")},cancel:function(){this.parent.cancel.apply(this,arguments);this.selection.focus()},initContainer:function(){var a,b=this.container,c=!1;this.selection=a=b.find(".select2-choice");this.search.bind("keydown",this.bind(function(a){switch(a.which){case d.UP:case d.DOWN:this.moveHighlight(a.which===d.UP?-1:1);i(a);break;case d.TAB:case d.ENTER:this.selectHighlighted();i(a);break;case d.ESC:this.cancel(a), +a.preventDefault()}}));b.delegate(".select2-choice","click",this.bind(function(b){c=!0;this.opened()?(this.close(),a.focus()):this.open();b.preventDefault();c=!1}));b.delegate(".select2-choice","keydown",this.bind(function(a){a.which===d.TAB||(d.isControl(a)||d.isFunctionKey(a)||a.which===d.ESC)||(this.open(),(a.which===d.PAGE_UP||a.which===d.PAGE_DOWN||a.which===d.SPACE)&&i(a),a.which===d.ENTER&&i(a))}));b.delegate(".select2-choice","focus",function(){b.addClass("select2-container-active")});b.delegate(".select2-choice", +"blur",this.bind(function(){c||this.opened()||this.blur()}));a.delegate("abbr","click",this.bind(function(a){this.val("");i(a);this.close();this.triggerChange()}));this.setPlaceholder()},initSelection:function(){var a;""===this.opts.element.val()?this.updateSelection({id:"",text:""}):(a=this.opts.initSelection.call(null,this.opts.element),a!==g&&null!==a&&this.updateSelection(a));this.close();this.setPlaceholder()},prepareOpts:function(){var a=this.parent.prepareOpts.apply(this,arguments);"select"=== +a.element.get(0).tagName.toLowerCase()&&(a.initSelection=function(a){a=a.find(":selected");return{id:a.attr("value"),text:a.text()}});return a},setPlaceholder:function(){var 
a=this.getPlaceholder();""===this.opts.element.val()&&a!==g&&!(this.select&&""!==this.select.find("option:first").text())&&("object"===typeof a?this.updateSelection(a):this.selection.find("span").html(a),this.selection.addClass("select2-default"),this.selection.find("abbr").hide())},postprocessResults:function(a,b){var c=0,f= +this,d=!0;this.results.find(".select2-result").each(function(a){if(n(f.id(e(this).data("select2-data")),f.opts.element.val()))return c=a,!1});this.highlight(c);!0===b&&(d=a.results.length>=this.opts.minimumResultsForSearch,this.search.parent().toggle(d),this.container[d?"addClass":"removeClass"]("select2-with-searchbox"))},onSelect:function(a){var b=this.opts.element.val();this.opts.element.val(this.id(a));this.updateSelection(a);this.close();this.selection.focus();n(b,this.id(a))||this.triggerChange()}, +updateSelection:function(a){this.selection.find("span").html(this.opts.formatSelection(a));this.selection.removeClass("select2-default");this.opts.allowClear&&this.getPlaceholder()!==g&&this.selection.find("abbr").show()},val:function(){var a,b=null;if(0===arguments.length)return this.opts.element.val();a=arguments[0];this.select?(this.select.val(a).find(":selected").each(function(){b={id:e(this).attr("value"),text:e(this).text()};return!1}),this.updateSelection(b)):(this.opts.element.val(!a?"":this.id(a)), +this.updateSelection(a));this.setPlaceholder()},clearSearch:function(){this.search.val("")}});q=o(m,{createContainer:function(){return e("
    ",{"class":"select2-container select2-container-multi",style:"width: "+this.getContainerWidth()}).html("
    ")},prepareOpts:function(){var a= +this.parent.prepareOpts.apply(this,arguments),a=e.extend({},{closeOnSelect:!0},a);"select"===a.element.get(0).tagName.toLowerCase()&&(a.initSelection=function(a){var c=[];a.find(":selected").each(function(){c.push({id:e(this).attr("value"),text:e(this).text()})});return c});return a},initContainer:function(){var a;this.searchContainer=this.container.find(".select2-search-field");this.selection=a=this.container.find(".select2-choices");this.search.bind("keydown",this.bind(function(b){if(b.which=== +d.BACKSPACE&&""===this.search.val()){this.close();var c;c=a.find(".select2-search-choice-focus");if(0j(d.id(this),b)&&(b.push(d.id(this)),c.push(this))});a=c;this.selection.find(".select2-search-choice").remove();e(a).each(function(){d.addSelectedChoice(this)});d.postprocessResults()},onSelect:function(a){this.addSelectedChoice(a);this.select&&this.postprocessResults();this.opts.closeOnSelect?(this.close(),this.search.width(10)):(this.search.width(10),this.resizeSearch());this.triggerChange();this.focusSearch()},cancel:function(){this.close();this.focusSearch()},addSelectedChoice:function(a){var b, +c=this.id(a),d=this.getVal();b=["
  • ",this.opts.formatSelection(a),"","
  • "];b=e(b.join(""));b.find("a").bind("click dblclick",this.bind(function(a){this.unselect(e(a.target));this.selection.find(".select2-search-choice-focus").removeClass("select2-search-choice-focus");i(a);this.close();this.focusSearch()})).bind("focus",this.bind(function(){this.container.addClass("select2-container-active")})); +b.data("select2-data",a);b.insertBefore(this.searchContainer);d.push(c);this.setVal(d)},unselect:function(a){var b=this.getVal(),c,a=a.closest(".select2-search-choice");if(0===a.length)throw"Invalid argument: "+a+". Must be .select2-search-choice";c=j(this.id(a.data("select2-data")),b);0<=c&&(b.splice(c,1),this.setVal(b),this.select&&this.postprocessResults());a.remove();this.triggerChange()},postprocessResults:function(){var a=this.getVal(),b=this.results.find(".select2-result"),c=this;b.each(function(){var b= +e(this),d=c.id(b.data("select2-data"));0<=j(d,a)?b.addClass("select2-disabled"):b.removeClass("select2-disabled")});b.each(function(a){if(!e(this).hasClass("select2-disabled"))return c.highlight(a),!1})},resizeSearch:function(){var a,b,c,d;c=this.search;a=e("
    ").css({position:"absolute",left:"-1000px",top:"-1000px",display:"none",fontSize:c.css("fontSize"),fontFamily:c.css("fontFamily"),fontStyle:c.css("fontStyle"),fontWeight:c.css("fontWeight"),letterSpacing:c.css("letterSpacing"),textTransform:c.css("textTransform"), +whiteSpace:"nowrap"});a.text(c.val());e("body").append(a);c=a.width();a.remove();a=c+10;b=this.search.offset().left;c=this.selection.width();d=this.selection.offset().left;b=c-(b-d)-(this.search.outerWidth()-this.search.width());bb&&(b=c-(this.search.outerWidth()-this.search.width()));this.search.width(b)},getVal:function(){var a;if(this.select)return a=this.select.val(),null===a?[]:a;a=this.opts.element.val();return r(a,",")},setVal:function(a){var b= +[];this.select?this.select.val(a):(e(a).each(function(){0>j(this,b)&&b.push(this)}),this.opts.element.val(0===b.length?"":b.join(",")))},val:function(){var a,b=[],c=this;if(0===arguments.length)return this.getVal();a=arguments[0];this.select?(this.setVal(a),this.select.find(":selected").each(function(){b.push({id:e(this).attr("value"),text:e(this).text()})}),this.updateSelection(b)):(a=null===a?[]:a,this.setVal(a),e(a).each(function(){b.push(c.id(this))}),this.setVal(b),this.updateSelection(a));this.clearSearch()}}); +e.fn.select2=function(){var a=Array.prototype.slice.call(arguments,0),b,c,d,h,i="val destroy open close focus isFocused".split(" ");this.each(function(){if(0===a.length||"object"===typeof a[0])b=0===a.length?{}:e.extend({},a[0]),b.element=e(this),"select"===b.element.get(0).tagName.toLowerCase()?h=b.element.attr("multiple"):(h=b.multiple||!1,"tags"in b&&(b.multiple=h=!0)),c=h?new q:new p,c.init(b);else if("string"===typeof a[0]){if(0>j(a[0],i))throw"Unknown method: "+a[0];d=g;c=e(this).data("select2"); +if(c!==g&&(d=c[a[0]].apply(c,a.slice(1)),d!==g))return!1}else throw"Invalid arguments to select2 plugin: "+a;});return 
d===g?this:d};window.Select2={query:{ajax:t,local:u,tags:v},util:{debounce:s},"class":{"abstract":m,single:p,multi:q}}}})(jQuery); diff --git a/src/dashboard/src/media/vendor/select2/select2.png b/src/dashboard/src/media/vendor/select2/select2.png new file mode 100644 index 0000000000..d08e4b7e62 Binary files /dev/null and b/src/dashboard/src/media/vendor/select2/select2.png differ diff --git a/src/dashboard/src/media/vendor/select2/spinner.gif b/src/dashboard/src/media/vendor/select2/spinner.gif new file mode 100644 index 0000000000..5b33f7e54f Binary files /dev/null and b/src/dashboard/src/media/vendor/select2/spinner.gif differ diff --git a/src/dashboard/src/media/vendor/twitter-bootstrap/.gitignore b/src/dashboard/src/media/vendor/twitter-bootstrap/.gitignore new file mode 100644 index 0000000000..fa611d84b2 --- /dev/null +++ b/src/dashboard/src/media/vendor/twitter-bootstrap/.gitignore @@ -0,0 +1,3 @@ +*~ +.DS_Store +thumbs.db diff --git a/src/dashboard/src/media/vendor/twitter-bootstrap/LICENSE b/src/dashboard/src/media/vendor/twitter-bootstrap/LICENSE new file mode 100644 index 0000000000..2bb9ad240f --- /dev/null +++ b/src/dashboard/src/media/vendor/twitter-bootstrap/LICENSE @@ -0,0 +1,176 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS \ No newline at end of file diff --git a/src/dashboard/src/media/vendor/twitter-bootstrap/Makefile b/src/dashboard/src/media/vendor/twitter-bootstrap/Makefile new file mode 100644 index 0000000000..16177554ea --- /dev/null +++ b/src/dashboard/src/media/vendor/twitter-bootstrap/Makefile @@ -0,0 +1,30 @@ +VERSION=1.4.0 +DATE=$(shell DATE) +BOOTSTRAP = ./bootstrap.css +BOOTSTRAP_MIN = ./bootstrap.min.css +BOOTSTRAP_LESS = ./lib/bootstrap.less +LESS_COMPRESSOR ?= `which lessc` +WATCHR ?= `which watchr` + +build: + @@if test ! 
-z ${LESS_COMPRESSOR}; then \ + sed -e 's/@VERSION/'"v${VERSION}"'/' -e 's/@DATE/'"${DATE}"'/' <${BOOTSTRAP_LESS} >${BOOTSTRAP_LESS}.tmp; \ + lessc ${BOOTSTRAP_LESS}.tmp > ${BOOTSTRAP}; \ + lessc ${BOOTSTRAP_LESS}.tmp > ${BOOTSTRAP_MIN} --compress; \ + rm -f ${BOOTSTRAP_LESS}.tmp; \ + echo "Bootstrap successfully built! - `date`"; \ + else \ + echo "You must have the LESS compiler installed in order to build Bootstrap."; \ + echo "You can install it by running: npm install less -g"; \ + fi + +watch: + @@if test ! -z ${WATCHR}; then \ + echo "Watching less files..."; \ + watchr -e "watch('lib/.*\.less') { system 'make' }"; \ + else \ + echo "You must have the watchr installed in order to watch Bootstrap less files."; \ + echo "You can install it by running: gem install watchr"; \ + fi + +.PHONY: build watch \ No newline at end of file diff --git a/src/dashboard/src/media/vendor/twitter-bootstrap/README.md b/src/dashboard/src/media/vendor/twitter-bootstrap/README.md new file mode 100644 index 0000000000..e05a25f736 --- /dev/null +++ b/src/dashboard/src/media/vendor/twitter-bootstrap/README.md @@ -0,0 +1,105 @@ +TWITTER BOOTSTRAP +================= + +Bootstrap is Twitter's toolkit for kickstarting CSS for websites, apps, and more. It includes base CSS styles for typography, forms, buttons, tables, grids, navigation, alerts, and more. + +To get started -- checkout http://twitter.github.com/bootstrap! + + +Usage +----- + +You can use Twitter Bootstrap in one of two ways: just drop the compiled CSS into any new project and start cranking, or run LESS on your site and compile on the fly like a boss. + +Here's what the LESS version looks like: + +``` html + + +``` + +Or if you prefer, the standard css way: + +``` html + +``` + +For more info, refer to the docs! 
+ + +Versioning +---------- + +For transparency and insight into our release cycle, and for striving to maintain backwards compatibility, Bootstrap will be maintained under the Semantic Versioning guidelines as much as possible. + +Releases will be numbered with the following format: + +`<major>.<minor>.<patch>` + +And constructed with the following guidelines: + +* Breaking backwards compatibility bumps the major +* New additions without breaking backwards compatibility bumps the minor +* Bug fixes and misc changes bump the patch + +For more information on SemVer, please visit http://semver.org/. + + +Bug tracker +----------- + +Have a bug? Please create an issue here on GitHub! + +https://github.com/twitter/bootstrap/issues + + +Twitter account +--------------- + +Keep up to date on announcements and more by following Bootstrap on Twitter, @TwBootstrap. + + +Mailing list +------------ + +Have a question? Ask on our mailing list! + +twitter-bootstrap@googlegroups.com + +http://groups.google.com/group/twitter-bootstrap + + +Developers +---------- + +We have included a makefile with convenience methods for working with the bootstrap library. + ++ **build** - `make build` +This will run the less compiler on the bootstrap lib and generate a bootstrap.css and bootstrap.min.css file. +The lessc compiler is required for this command to run. + ++ **watch** - `make watch` +This is a convenience method for watching your less files and automatically building them whenever you save. +Watchr is required for this command to run. + + +Authors +------- + +**Mark Otto** + ++ http://twitter.com/mdo ++ http://github.com/markdotto + +**Jacob Thornton** + ++ http://twitter.com/fat ++ http://github.com/fat + + +License +--------------------- + +Copyright 2011 Twitter, Inc. 
+ +Licensed under the Apache License, Version 2.0: http://www.apache.org/licenses/LICENSE-2.0 \ No newline at end of file diff --git a/src/dashboard/src/media/vendor/twitter-bootstrap/bootstrap.css b/src/dashboard/src/media/vendor/twitter-bootstrap/bootstrap.css new file mode 100644 index 0000000000..8cc8b2824b --- /dev/null +++ b/src/dashboard/src/media/vendor/twitter-bootstrap/bootstrap.css @@ -0,0 +1,2467 @@ +/*! + * Bootstrap v1.4.0 + * + * Copyright 2011 Twitter, Inc + * Licensed under the Apache License v2.0 + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Designed and built with all the love in the world @twitter by @mdo and @fat. + * Date: Fri Nov 4 13:44:15 PDT 2011 + */ +/* Reset.less + * Props to Eric Meyer (meyerweb.com) for his CSS reset file. We're using an adapted version here that cuts out some of the reset HTML elements we will never need here (i.e., dfn, samp, etc). + * ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- */ +html, body { + margin: 0; + padding: 0; +} +h1, +h2, +h3, +h4, +h5, +h6, +p, +blockquote, +pre, +a, +abbr, +acronym, +address, +cite, +code, +del, +dfn, +em, +img, +q, +s, +samp, +small, +strike, +strong, +sub, +sup, +tt, +var, +dd, +dl, +dt, +li, +ol, +ul, +fieldset, +form, +label, +legend, +button, +table, +caption, +tbody, +tfoot, +thead, +tr, +th, +td { + margin: 0; + padding: 0; + border: 0; + font-weight: normal; + font-style: normal; + font-size: 100%; + line-height: 1; + font-family: inherit; +} +table { + border-collapse: collapse; + border-spacing: 0; +} +ol, ul { + list-style: none; +} +q:before, +q:after, +blockquote:before, +blockquote:after { + content: ""; +} +html { + overflow-y: scroll; + font-size: 100%; + -webkit-text-size-adjust: 100%; + -ms-text-size-adjust: 100%; +} +a:focus { + outline: thin dotted; +} +a:hover, a:active { + outline: 0; +} +article, +aside, 
+details, +figcaption, +figure, +footer, +header, +hgroup, +nav, +section { + display: block; +} +audio, canvas, video { + display: inline-block; + *display: inline; + *zoom: 1; +} +audio:not([controls]) { + display: none; +} +sub, sup { + font-size: 75%; + line-height: 0; + position: relative; + vertical-align: baseline; +} +sup { + top: -0.5em; +} +sub { + bottom: -0.25em; +} +img { + border: 0; + -ms-interpolation-mode: bicubic; +} +button, +input, +select, +textarea { + font-size: 100%; + margin: 0; + vertical-align: baseline; + *vertical-align: middle; +} +button, input { + line-height: normal; + *overflow: visible; +} +button::-moz-focus-inner, input::-moz-focus-inner { + border: 0; + padding: 0; +} +button, +input[type="button"], +input[type="reset"], +input[type="submit"] { + cursor: pointer; + -webkit-appearance: button; +} +input[type="search"] { + -webkit-appearance: textfield; + -webkit-box-sizing: content-box; + -moz-box-sizing: content-box; + box-sizing: content-box; +} +input[type="search"]::-webkit-search-decoration { + -webkit-appearance: none; +} +textarea { + overflow: auto; + vertical-align: top; +} +/* Variables.less + * Variables to customize the look and feel of Bootstrap + * ----------------------------------------------------- */ +/* Mixins.less + * Snippets of reusable CSS to develop faster and keep code readable + * ----------------------------------------------------------------- */ +/* + * Scaffolding + * Basic and global styles for generating a grid system, structural layout, and page templates + * ------------------------------------------------------------------------------------------- */ +body { + background-color: #ffffff; + margin: 0; + font-family: "Helvetica Neue", Helvetica, Arial, sans-serif; + font-size: 13px; + font-weight: normal; + line-height: 18px; + color: #404040; +} +.container { + width: 940px; + margin-left: auto; + margin-right: auto; + zoom: 1; +} +.container:before, .container:after { + display: table; + 
content: ""; + zoom: 1; +} +.container:after { + clear: both; +} +.container-fluid { + position: relative; + min-width: 940px; + padding-left: 20px; + padding-right: 20px; + zoom: 1; +} +.container-fluid:before, .container-fluid:after { + display: table; + content: ""; + zoom: 1; +} +.container-fluid:after { + clear: both; +} +.container-fluid > .sidebar { + position: absolute; + top: 0; + left: 20px; + width: 220px; +} +.container-fluid > .content { + margin-left: 240px; +} +a { + color: #0069d6; + text-decoration: none; + line-height: inherit; + font-weight: inherit; +} +a:hover { + color: #00438a; + text-decoration: underline; +} +.pull-right { + float: right; +} +.pull-left { + float: left; +} +.hide { + display: none; +} +.show { + display: block; +} +.row { + zoom: 1; + margin-left: -20px; +} +.row:before, .row:after { + display: table; + content: ""; + zoom: 1; +} +.row:after { + clear: both; +} +.row > [class*="span"] { + display: inline; + float: left; + margin-left: 20px; +} +.span1 { + width: 40px; +} +.span2 { + width: 100px; +} +.span3 { + width: 160px; +} +.span4 { + width: 220px; +} +.span5 { + width: 280px; +} +.span6 { + width: 340px; +} +.span7 { + width: 400px; +} +.span8 { + width: 460px; +} +.span9 { + width: 520px; +} +.span10 { + width: 580px; +} +.span11 { + width: 640px; +} +.span12 { + width: 700px; +} +.span13 { + width: 760px; +} +.span14 { + width: 820px; +} +.span15 { + width: 880px; +} +.span16 { + width: 940px; +} +.span17 { + width: 1000px; +} +.span18 { + width: 1060px; +} +.span19 { + width: 1120px; +} +.span20 { + width: 1180px; +} +.span21 { + width: 1240px; +} +.span22 { + width: 1300px; +} +.span23 { + width: 1360px; +} +.span24 { + width: 1420px; +} +.row > .offset1 { + margin-left: 80px; +} +.row > .offset2 { + margin-left: 140px; +} +.row > .offset3 { + margin-left: 200px; +} +.row > .offset4 { + margin-left: 260px; +} +.row > .offset5 { + margin-left: 320px; +} +.row > .offset6 { + margin-left: 380px; +} +.row > .offset7 { 
+ margin-left: 440px; +} +.row > .offset8 { + margin-left: 500px; +} +.row > .offset9 { + margin-left: 560px; +} +.row > .offset10 { + margin-left: 620px; +} +.row > .offset11 { + margin-left: 680px; +} +.row > .offset12 { + margin-left: 740px; +} +.span-one-third { + width: 300px; +} +.span-two-thirds { + width: 620px; +} +.offset-one-third { + margin-left: 340px; +} +.offset-two-thirds { + margin-left: 660px; +} +/* Typography.less + * Headings, body text, lists, code, and more for a versatile and durable typography system + * ---------------------------------------------------------------------------------------- */ +p { + font-size: 13px; + font-weight: normal; + line-height: 18px; + margin-bottom: 9px; +} +p small { + font-size: 11px; + color: #bfbfbf; +} +h1, +h2, +h3, +h4, +h5, +h6 { + font-weight: bold; + color: #404040; +} +h1 small, +h2 small, +h3 small, +h4 small, +h5 small, +h6 small { + color: #bfbfbf; +} +h1 { + margin-bottom: 18px; + font-size: 30px; + line-height: 36px; +} +h1 small { + font-size: 18px; +} +h2 { + font-size: 24px; + line-height: 36px; +} +h2 small { + font-size: 14px; +} +h3, +h4, +h5, +h6 { + line-height: 36px; +} +h3 { + font-size: 18px; +} +h3 small { + font-size: 14px; +} +h4 { + font-size: 16px; +} +h4 small { + font-size: 12px; +} +h5 { + font-size: 14px; +} +h6 { + font-size: 13px; + color: #bfbfbf; + text-transform: uppercase; +} +ul, ol { + margin: 0 0 18px 25px; +} +ul ul, +ul ol, +ol ol, +ol ul { + margin-bottom: 0; +} +ul { + list-style: disc; +} +ol { + list-style: decimal; +} +li { + line-height: 18px; + color: #808080; +} +ul.unstyled { + list-style: none; + margin-left: 0; +} +dl { + margin-bottom: 18px; +} +dl dt, dl dd { + line-height: 18px; +} +dl dt { + font-weight: bold; +} +dl dd { + margin-left: 9px; +} +hr { + margin: 20px 0 19px; + border: 0; + border-bottom: 1px solid #eee; +} +strong { + font-style: inherit; + font-weight: bold; +} +em { + font-style: italic; + font-weight: inherit; + line-height: inherit; 
+} +.muted { + color: #bfbfbf; +} +blockquote { + margin-bottom: 18px; + border-left: 5px solid #eee; + padding-left: 15px; +} +blockquote p { + font-size: 14px; + font-weight: 300; + line-height: 18px; + margin-bottom: 0; +} +blockquote small { + display: block; + font-size: 12px; + font-weight: 300; + line-height: 18px; + color: #bfbfbf; +} +blockquote small:before { + content: '\2014 \00A0'; +} +address { + display: block; + line-height: 18px; + margin-bottom: 18px; +} +code, pre { + padding: 0 3px 2px; + font-family: Monaco, Andale Mono, Courier New, monospace; + font-size: 12px; + -webkit-border-radius: 3px; + -moz-border-radius: 3px; + border-radius: 3px; +} +code { + background-color: #fee9cc; + color: rgba(0, 0, 0, 0.75); + padding: 1px 3px; +} +pre { + background-color: #f5f5f5; + display: block; + padding: 8.5px; + margin: 0 0 18px; + line-height: 18px; + font-size: 12px; + border: 1px solid #ccc; + border: 1px solid rgba(0, 0, 0, 0.15); + -webkit-border-radius: 3px; + -moz-border-radius: 3px; + border-radius: 3px; + white-space: pre; + white-space: pre-wrap; + word-wrap: break-word; +} +/* Forms.less + * Base styles for various input types, form layouts, and states + * ------------------------------------------------------------- */ +form { + margin-bottom: 18px; +} +fieldset { + margin-bottom: 18px; + padding-top: 18px; +} +fieldset legend { + display: block; + padding-left: 150px; + font-size: 19.5px; + line-height: 1; + color: #404040; + *padding: 0 0 5px 145px; + /* IE6-7 */ + + *line-height: 1.5; + /* IE6-7 */ + +} +form .clearfix { + margin-bottom: 18px; + zoom: 1; +} +form .clearfix:before, form .clearfix:after { + display: table; + content: ""; + zoom: 1; +} +form .clearfix:after { + clear: both; +} +label, +input, +select, +textarea { + font-family: "Helvetica Neue", Helvetica, Arial, sans-serif; + font-size: 13px; + font-weight: normal; + line-height: normal; +} +label { + padding-top: 6px; + font-size: 13px; + line-height: 18px; + float: left; 
+ width: 130px; + text-align: right; + color: #404040; +} +form .input { + margin-left: 150px; +} +input[type=checkbox], input[type=radio] { + cursor: pointer; +} +input, +textarea, +select, +.uneditable-input { + display: inline-block; + width: 210px; + height: 18px; + padding: 4px; + font-size: 13px; + line-height: 18px; + color: #808080; + border: 1px solid #ccc; + -webkit-border-radius: 3px; + -moz-border-radius: 3px; + border-radius: 3px; +} +select { + padding: initial; +} +input[type=checkbox], input[type=radio] { + width: auto; + height: auto; + padding: 0; + margin: 3px 0; + *margin-top: 0; + /* IE6-7 */ + + line-height: normal; + border: none; +} +input[type=file] { + background-color: #ffffff; + padding: initial; + border: initial; + line-height: initial; + -webkit-box-shadow: none; + -moz-box-shadow: none; + box-shadow: none; +} +input[type=button], input[type=reset], input[type=submit] { + width: auto; + height: auto; +} +select, input[type=file] { + height: 27px; + *height: auto; + line-height: 27px; + *margin-top: 4px; + /* For IE7, add top margin to align select with labels */ + +} +select[multiple] { + height: inherit; + background-color: #ffffff; +} +textarea { + height: auto; +} +.uneditable-input { + background-color: #ffffff; + display: block; + border-color: #eee; + -webkit-box-shadow: inset 0 1px 2px rgba(0, 0, 0, 0.025); + -moz-box-shadow: inset 0 1px 2px rgba(0, 0, 0, 0.025); + box-shadow: inset 0 1px 2px rgba(0, 0, 0, 0.025); + cursor: not-allowed; +} +:-moz-placeholder { + color: #bfbfbf; +} +::-webkit-input-placeholder { + color: #bfbfbf; +} +input, textarea { + -webkit-transition: border linear 0.2s, box-shadow linear 0.2s; + -moz-transition: border linear 0.2s, box-shadow linear 0.2s; + -ms-transition: border linear 0.2s, box-shadow linear 0.2s; + -o-transition: border linear 0.2s, box-shadow linear 0.2s; + transition: border linear 0.2s, box-shadow linear 0.2s; + -webkit-box-shadow: inset 0 1px 3px rgba(0, 0, 0, 0.1); + 
-moz-box-shadow: inset 0 1px 3px rgba(0, 0, 0, 0.1); + box-shadow: inset 0 1px 3px rgba(0, 0, 0, 0.1); +} +input:focus, textarea:focus { + outline: 0; + border-color: rgba(82, 168, 236, 0.8); + -webkit-box-shadow: inset 0 1px 3px rgba(0, 0, 0, 0.1), 0 0 8px rgba(82, 168, 236, 0.6); + -moz-box-shadow: inset 0 1px 3px rgba(0, 0, 0, 0.1), 0 0 8px rgba(82, 168, 236, 0.6); + box-shadow: inset 0 1px 3px rgba(0, 0, 0, 0.1), 0 0 8px rgba(82, 168, 236, 0.6); +} +input[type=file]:focus, input[type=checkbox]:focus, select:focus { + -webkit-box-shadow: none; + -moz-box-shadow: none; + box-shadow: none; + outline: 1px dotted #666; +} +form .clearfix.error > label, form .clearfix.error .help-block, form .clearfix.error .help-inline { + color: #b94a48; +} +form .clearfix.error input, form .clearfix.error textarea { + color: #b94a48; + border-color: #ee5f5b; +} +form .clearfix.error input:focus, form .clearfix.error textarea:focus { + border-color: #e9322d; + -webkit-box-shadow: 0 0 6px #f8b9b7; + -moz-box-shadow: 0 0 6px #f8b9b7; + box-shadow: 0 0 6px #f8b9b7; +} +form .clearfix.error .input-prepend .add-on, form .clearfix.error .input-append .add-on { + color: #b94a48; + background-color: #fce6e6; + border-color: #b94a48; +} +form .clearfix.warning > label, form .clearfix.warning .help-block, form .clearfix.warning .help-inline { + color: #c09853; +} +form .clearfix.warning input, form .clearfix.warning textarea { + color: #c09853; + border-color: #ccae64; +} +form .clearfix.warning input:focus, form .clearfix.warning textarea:focus { + border-color: #be9a3f; + -webkit-box-shadow: 0 0 6px #e5d6b1; + -moz-box-shadow: 0 0 6px #e5d6b1; + box-shadow: 0 0 6px #e5d6b1; +} +form .clearfix.warning .input-prepend .add-on, form .clearfix.warning .input-append .add-on { + color: #c09853; + background-color: #d2b877; + border-color: #c09853; +} +form .clearfix.success > label, form .clearfix.success .help-block, form .clearfix.success .help-inline { + color: #468847; +} +form 
.clearfix.success input, form .clearfix.success textarea { + color: #468847; + border-color: #57a957; +} +form .clearfix.success input:focus, form .clearfix.success textarea:focus { + border-color: #458845; + -webkit-box-shadow: 0 0 6px #9acc9a; + -moz-box-shadow: 0 0 6px #9acc9a; + box-shadow: 0 0 6px #9acc9a; +} +form .clearfix.success .input-prepend .add-on, form .clearfix.success .input-append .add-on { + color: #468847; + background-color: #bcddbc; + border-color: #468847; +} +.input-mini, +input.mini, +textarea.mini, +select.mini { + width: 60px; +} +.input-small, +input.small, +textarea.small, +select.small { + width: 90px; +} +.input-medium, +input.medium, +textarea.medium, +select.medium { + width: 150px; +} +.input-large, +input.large, +textarea.large, +select.large { + width: 210px; +} +.input-xlarge, +input.xlarge, +textarea.xlarge, +select.xlarge { + width: 270px; +} +.input-xxlarge, +input.xxlarge, +textarea.xxlarge, +select.xxlarge { + width: 530px; +} +textarea.xxlarge { + overflow-y: auto; +} +input.span1, textarea.span1 { + display: inline-block; + float: none; + width: 30px; + margin-left: 0; +} +input.span2, textarea.span2 { + display: inline-block; + float: none; + width: 90px; + margin-left: 0; +} +input.span3, textarea.span3 { + display: inline-block; + float: none; + width: 150px; + margin-left: 0; +} +input.span4, textarea.span4 { + display: inline-block; + float: none; + width: 210px; + margin-left: 0; +} +input.span5, textarea.span5 { + display: inline-block; + float: none; + width: 270px; + margin-left: 0; +} +input.span6, textarea.span6 { + display: inline-block; + float: none; + width: 330px; + margin-left: 0; +} +input.span7, textarea.span7 { + display: inline-block; + float: none; + width: 390px; + margin-left: 0; +} +input.span8, textarea.span8 { + display: inline-block; + float: none; + width: 450px; + margin-left: 0; +} +input.span9, textarea.span9 { + display: inline-block; + float: none; + width: 510px; + margin-left: 0; +} 
+input.span10, textarea.span10 { + display: inline-block; + float: none; + width: 570px; + margin-left: 0; +} +input.span11, textarea.span11 { + display: inline-block; + float: none; + width: 630px; + margin-left: 0; +} +input.span12, textarea.span12 { + display: inline-block; + float: none; + width: 690px; + margin-left: 0; +} +input.span13, textarea.span13 { + display: inline-block; + float: none; + width: 750px; + margin-left: 0; +} +input.span14, textarea.span14 { + display: inline-block; + float: none; + width: 810px; + margin-left: 0; +} +input.span15, textarea.span15 { + display: inline-block; + float: none; + width: 870px; + margin-left: 0; +} +input.span16, textarea.span16 { + display: inline-block; + float: none; + width: 930px; + margin-left: 0; +} +input[disabled], +select[disabled], +textarea[disabled], +input[readonly], +select[readonly], +textarea[readonly] { + background-color: #f5f5f5; + border-color: #ddd; + cursor: not-allowed; +} +.actions { + background: #f5f5f5; + margin-top: 18px; + margin-bottom: 18px; + padding: 17px 20px 18px 150px; + border-top: 1px solid #ddd; + -webkit-border-radius: 0 0 3px 3px; + -moz-border-radius: 0 0 3px 3px; + border-radius: 0 0 3px 3px; +} +.actions .secondary-action { + float: right; +} +.actions .secondary-action a { + line-height: 30px; +} +.actions .secondary-action a:hover { + text-decoration: underline; +} +.help-inline, .help-block { + font-size: 13px; + line-height: 18px; + color: #bfbfbf; +} +.help-inline { + padding-left: 5px; + *position: relative; + /* IE6-7 */ + + *top: -5px; + /* IE6-7 */ + +} +.help-block { + display: block; + max-width: 600px; +} +.inline-inputs { + color: #808080; +} +.inline-inputs span { + padding: 0 2px 0 1px; +} +.input-prepend input, .input-append input { + -webkit-border-radius: 0 3px 3px 0; + -moz-border-radius: 0 3px 3px 0; + border-radius: 0 3px 3px 0; +} +.input-prepend .add-on, .input-append .add-on { + position: relative; + background: #f5f5f5; + border: 1px solid 
#ccc; + z-index: 2; + float: left; + display: block; + width: auto; + min-width: 16px; + height: 18px; + padding: 4px 4px 4px 5px; + margin-right: -1px; + font-weight: normal; + line-height: 18px; + color: #bfbfbf; + text-align: center; + text-shadow: 0 1px 0 #ffffff; + -webkit-border-radius: 3px 0 0 3px; + -moz-border-radius: 3px 0 0 3px; + border-radius: 3px 0 0 3px; +} +.input-prepend .active, .input-append .active { + background: #a9dba9; + border-color: #46a546; +} +.input-prepend .add-on { + *margin-top: 1px; + /* IE6-7 */ + +} +.input-append input { + float: left; + -webkit-border-radius: 3px 0 0 3px; + -moz-border-radius: 3px 0 0 3px; + border-radius: 3px 0 0 3px; +} +.input-append .add-on { + -webkit-border-radius: 0 3px 3px 0; + -moz-border-radius: 0 3px 3px 0; + border-radius: 0 3px 3px 0; + margin-right: 0; + margin-left: -1px; +} +.inputs-list { + margin: 0 0 5px; + width: 100%; +} +.inputs-list li { + display: block; + padding: 0; + width: 100%; +} +.inputs-list label { + display: block; + float: none; + width: auto; + padding: 0; + margin-left: 20px; + line-height: 18px; + text-align: left; + white-space: normal; +} +.inputs-list label strong { + color: #808080; +} +.inputs-list label small { + font-size: 11px; + font-weight: normal; +} +.inputs-list .inputs-list { + margin-left: 25px; + margin-bottom: 10px; + padding-top: 0; +} +.inputs-list:first-child { + padding-top: 6px; +} +.inputs-list li + li { + padding-top: 2px; +} +.inputs-list input[type=radio], .inputs-list input[type=checkbox] { + margin-bottom: 0; + margin-left: -20px; + float: left; +} +.form-stacked { + padding-left: 20px; +} +.form-stacked fieldset { + padding-top: 9px; +} +.form-stacked legend { + padding-left: 0; +} +.form-stacked label { + display: block; + float: none; + width: auto; + font-weight: bold; + text-align: left; + line-height: 20px; + padding-top: 0; +} +.form-stacked .clearfix { + margin-bottom: 9px; +} +.form-stacked .clearfix div.input { + margin-left: 0; +} 
+.form-stacked .inputs-list { + margin-bottom: 0; +} +.form-stacked .inputs-list li { + padding-top: 0; +} +.form-stacked .inputs-list li label { + font-weight: normal; + padding-top: 0; +} +.form-stacked div.clearfix.error { + padding-top: 10px; + padding-bottom: 10px; + padding-left: 10px; + margin-top: 0; + margin-left: -10px; +} +.form-stacked .actions { + margin-left: -20px; + padding-left: 20px; +} +/* + * Tables.less + * Tables for, you guessed it, tabular data + * ---------------------------------------- */ +table { + width: 100%; + margin-bottom: 18px; + padding: 0; + font-size: 13px; + border-collapse: collapse; +} +table th, table td { + padding: 10px 10px 9px; + line-height: 18px; + text-align: left; +} +table th { + padding-top: 9px; + font-weight: bold; + vertical-align: middle; +} +table td { + vertical-align: top; + border-top: 1px solid #ddd; +} +table tbody th { + border-top: 1px solid #ddd; + vertical-align: top; +} +.condensed-table th, .condensed-table td { + padding: 5px 5px 4px; +} +.bordered-table { + border: 1px solid #ddd; + border-collapse: separate; + *border-collapse: collapse; + /* IE7, collapse table to remove spacing */ + + -webkit-border-radius: 4px; + -moz-border-radius: 4px; + border-radius: 4px; +} +.bordered-table th + th, .bordered-table td + td, .bordered-table th + td { + border-left: 1px solid #ddd; +} +.bordered-table thead tr:first-child th:first-child, .bordered-table tbody tr:first-child td:first-child { + -webkit-border-radius: 4px 0 0 0; + -moz-border-radius: 4px 0 0 0; + border-radius: 4px 0 0 0; +} +.bordered-table thead tr:first-child th:last-child, .bordered-table tbody tr:first-child td:last-child { + -webkit-border-radius: 0 4px 0 0; + -moz-border-radius: 0 4px 0 0; + border-radius: 0 4px 0 0; +} +.bordered-table tbody tr:last-child td:first-child { + -webkit-border-radius: 0 0 0 4px; + -moz-border-radius: 0 0 0 4px; + border-radius: 0 0 0 4px; +} +.bordered-table tbody tr:last-child td:last-child { + 
-webkit-border-radius: 0 0 4px 0; + -moz-border-radius: 0 0 4px 0; + border-radius: 0 0 4px 0; +} +table .span1 { + width: 20px; +} +table .span2 { + width: 60px; +} +table .span3 { + width: 100px; +} +table .span4 { + width: 140px; +} +table .span5 { + width: 180px; +} +table .span6 { + width: 220px; +} +table .span7 { + width: 260px; +} +table .span8 { + width: 300px; +} +table .span9 { + width: 340px; +} +table .span10 { + width: 380px; +} +table .span11 { + width: 420px; +} +table .span12 { + width: 460px; +} +table .span13 { + width: 500px; +} +table .span14 { + width: 540px; +} +table .span15 { + width: 580px; +} +table .span16 { + width: 620px; +} +.zebra-striped tbody tr:nth-child(odd) td, .zebra-striped tbody tr:nth-child(odd) th { + background-color: #f9f9f9; +} +.zebra-striped tbody tr:hover td, .zebra-striped tbody tr:hover th { + background-color: #f5f5f5; +} +table .header { + cursor: pointer; +} +table .header:after { + content: ""; + float: right; + margin-top: 7px; + border-width: 0 4px 4px; + border-style: solid; + border-color: #000 transparent; + visibility: hidden; +} +table .headerSortUp, table .headerSortDown { + background-color: rgba(141, 192, 219, 0.25); + text-shadow: 0 1px 1px rgba(255, 255, 255, 0.75); +} +table .header:hover:after { + visibility: visible; +} +table .headerSortDown:after, table .headerSortDown:hover:after { + visibility: visible; + filter: alpha(opacity=60); + -khtml-opacity: 0.6; + -moz-opacity: 0.6; + opacity: 0.6; +} +table .headerSortUp:after { + border-bottom: none; + border-left: 4px solid transparent; + border-right: 4px solid transparent; + border-top: 4px solid #000; + visibility: visible; + -webkit-box-shadow: none; + -moz-box-shadow: none; + box-shadow: none; + filter: alpha(opacity=60); + -khtml-opacity: 0.6; + -moz-opacity: 0.6; + opacity: 0.6; +} +table .blue { + color: #049cdb; + border-bottom-color: #049cdb; +} +table .headerSortUp.blue, table .headerSortDown.blue { + background-color: #ade6fe; +} +table 
.green { + color: #46a546; + border-bottom-color: #46a546; +} +table .headerSortUp.green, table .headerSortDown.green { + background-color: #cdeacd; +} +table .red { + color: #9d261d; + border-bottom-color: #9d261d; +} +table .headerSortUp.red, table .headerSortDown.red { + background-color: #f4c8c5; +} +table .yellow { + color: #ffc40d; + border-bottom-color: #ffc40d; +} +table .headerSortUp.yellow, table .headerSortDown.yellow { + background-color: #fff6d9; +} +table .orange { + color: #f89406; + border-bottom-color: #f89406; +} +table .headerSortUp.orange, table .headerSortDown.orange { + background-color: #fee9cc; +} +table .purple { + color: #7a43b6; + border-bottom-color: #7a43b6; +} +table .headerSortUp.purple, table .headerSortDown.purple { + background-color: #e2d5f0; +} +/* Patterns.less + * Repeatable UI elements outside the base styles provided from the scaffolding + * ---------------------------------------------------------------------------- */ +.topbar { + height: 40px; + position: fixed; + top: 0; + left: 0; + right: 0; + z-index: 10000; + overflow: visible; +} +.topbar a { + color: #bfbfbf; + text-shadow: 0 -1px 0 rgba(0, 0, 0, 0.25); +} +.topbar h3 a:hover, .topbar .brand:hover, .topbar ul .active > a { + background-color: #333; + background-color: rgba(255, 255, 255, 0.05); + color: #ffffff; + text-decoration: none; +} +.topbar h3 { + position: relative; +} +.topbar h3 a, .topbar .brand { + float: left; + display: block; + padding: 8px 20px 12px; + margin-left: -20px; + color: #ffffff; + font-size: 20px; + font-weight: 200; + line-height: 1; +} +.topbar p { + margin: 0; + line-height: 40px; +} +.topbar p a:hover { + background-color: transparent; + color: #ffffff; +} +.topbar form { + float: left; + margin: 5px 0 0 0; + position: relative; + filter: alpha(opacity=100); + -khtml-opacity: 1; + -moz-opacity: 1; + opacity: 1; +} +.topbar form.pull-right { + float: right; +} +.topbar input { + background-color: #444; + background-color: rgba(255, 
255, 255, 0.3);
+  font-family: "Helvetica Neue", Helvetica, Arial, sans-serif;
+  /* FIX: values were swapped upstream (`font-size: normal; font-weight: 13px;`),
+     which is invalid CSS on both counts — font-size takes a length,
+     font-weight takes a keyword or 100-900. */
+  font-size: 13px;
+  font-weight: normal;
+  line-height: 1;
+  padding: 4px 9px;
+  color: #ffffff;
+  color: rgba(255, 255, 255, 0.75);
+  border: 1px solid #111;
+  -webkit-border-radius: 4px;
+  -moz-border-radius: 4px;
+  border-radius: 4px;
+  -webkit-box-shadow: inset 0 1px 2px rgba(0, 0, 0, 0.1), 0 1px 0px rgba(255, 255, 255, 0.25);
+  -moz-box-shadow: inset 0 1px 2px rgba(0, 0, 0, 0.1), 0 1px 0px rgba(255, 255, 255, 0.25);
+  box-shadow: inset 0 1px 2px rgba(0, 0, 0, 0.1), 0 1px 0px rgba(255, 255, 255, 0.25);
+  -webkit-transition: none;
+  -moz-transition: none;
+  -ms-transition: none;
+  -o-transition: none;
+  transition: none;
+}
+.topbar input:-moz-placeholder {
+  color: #e6e6e6;
+}
+.topbar input::-webkit-input-placeholder {
+  color: #e6e6e6;
+}
+.topbar input:hover {
+  background-color: #bfbfbf;
+  background-color: rgba(255, 255, 255, 0.5);
+  color: #ffffff;
+}
+.topbar input:focus, .topbar input.focused {
+  outline: 0;
+  background-color: #ffffff;
+  color: #404040;
+  text-shadow: 0 1px 0 #ffffff;
+  border: 0;
+  padding: 5px 10px;
+  -webkit-box-shadow: 0 0 3px rgba(0, 0, 0, 0.15);
+  -moz-box-shadow: 0 0 3px rgba(0, 0, 0, 0.15);
+  box-shadow: 0 0 3px rgba(0, 0, 0, 0.15);
+}
+.topbar-inner, .topbar .fill {
+  background-color: #222;
+  background-color: #222222;
+  background-repeat: repeat-x;
+  background-image: -khtml-gradient(linear, left top, left bottom, from(#333333), to(#222222));
+  background-image: -moz-linear-gradient(top, #333333, #222222);
+  background-image: -ms-linear-gradient(top, #333333, #222222);
+  background-image: -webkit-gradient(linear, left top, left bottom, color-stop(0%, #333333), color-stop(100%, #222222));
+  background-image: -webkit-linear-gradient(top, #333333, #222222);
+  background-image: -o-linear-gradient(top, #333333, #222222);
+  background-image: linear-gradient(top, #333333, #222222);
+  filter:
progid:DXImageTransform.Microsoft.gradient(startColorstr='#333333', endColorstr='#222222', GradientType=0); + -webkit-box-shadow: 0 1px 3px rgba(0, 0, 0, 0.25), inset 0 -1px 0 rgba(0, 0, 0, 0.1); + -moz-box-shadow: 0 1px 3px rgba(0, 0, 0, 0.25), inset 0 -1px 0 rgba(0, 0, 0, 0.1); + box-shadow: 0 1px 3px rgba(0, 0, 0, 0.25), inset 0 -1px 0 rgba(0, 0, 0, 0.1); +} +.topbar div > ul, .nav { + display: block; + float: left; + margin: 0 10px 0 0; + position: relative; + left: 0; +} +.topbar div > ul > li, .nav > li { + display: block; + float: left; +} +.topbar div > ul a, .nav a { + display: block; + float: none; + padding: 10px 10px 11px; + line-height: 19px; + text-decoration: none; +} +.topbar div > ul a:hover, .nav a:hover { + color: #ffffff; + text-decoration: none; +} +.topbar div > ul .active > a, .nav .active > a { + background-color: #222; + background-color: rgba(0, 0, 0, 0.5); +} +.topbar div > ul.secondary-nav, .nav.secondary-nav { + float: right; + margin-left: 10px; + margin-right: 0; +} +.topbar div > ul.secondary-nav .menu-dropdown, +.nav.secondary-nav .menu-dropdown, +.topbar div > ul.secondary-nav .dropdown-menu, +.nav.secondary-nav .dropdown-menu { + right: 0; + border: 0; +} +.topbar div > ul a.menu:hover, +.nav a.menu:hover, +.topbar div > ul li.open .menu, +.nav li.open .menu, +.topbar div > ul .dropdown-toggle:hover, +.nav .dropdown-toggle:hover, +.topbar div > ul .dropdown.open .dropdown-toggle, +.nav .dropdown.open .dropdown-toggle { + background: #444; + background: rgba(255, 255, 255, 0.05); +} +.topbar div > ul .menu-dropdown, +.nav .menu-dropdown, +.topbar div > ul .dropdown-menu, +.nav .dropdown-menu { + background-color: #333; +} +.topbar div > ul .menu-dropdown a.menu, +.nav .menu-dropdown a.menu, +.topbar div > ul .dropdown-menu a.menu, +.nav .dropdown-menu a.menu, +.topbar div > ul .menu-dropdown .dropdown-toggle, +.nav .menu-dropdown .dropdown-toggle, +.topbar div > ul .dropdown-menu .dropdown-toggle, +.nav .dropdown-menu 
.dropdown-toggle { + color: #ffffff; +} +.topbar div > ul .menu-dropdown a.menu.open, +.nav .menu-dropdown a.menu.open, +.topbar div > ul .dropdown-menu a.menu.open, +.nav .dropdown-menu a.menu.open, +.topbar div > ul .menu-dropdown .dropdown-toggle.open, +.nav .menu-dropdown .dropdown-toggle.open, +.topbar div > ul .dropdown-menu .dropdown-toggle.open, +.nav .dropdown-menu .dropdown-toggle.open { + background: #444; + background: rgba(255, 255, 255, 0.05); +} +.topbar div > ul .menu-dropdown li a, +.nav .menu-dropdown li a, +.topbar div > ul .dropdown-menu li a, +.nav .dropdown-menu li a { + color: #999; + text-shadow: 0 1px 0 rgba(0, 0, 0, 0.5); +} +.topbar div > ul .menu-dropdown li a:hover, +.nav .menu-dropdown li a:hover, +.topbar div > ul .dropdown-menu li a:hover, +.nav .dropdown-menu li a:hover { + background-color: #191919; + background-repeat: repeat-x; + background-image: -khtml-gradient(linear, left top, left bottom, from(#292929), to(#191919)); + background-image: -moz-linear-gradient(top, #292929, #191919); + background-image: -ms-linear-gradient(top, #292929, #191919); + background-image: -webkit-gradient(linear, left top, left bottom, color-stop(0%, #292929), color-stop(100%, #191919)); + background-image: -webkit-linear-gradient(top, #292929, #191919); + background-image: -o-linear-gradient(top, #292929, #191919); + background-image: linear-gradient(top, #292929, #191919); + filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#292929', endColorstr='#191919', GradientType=0); + color: #ffffff; +} +.topbar div > ul .menu-dropdown .active a, +.nav .menu-dropdown .active a, +.topbar div > ul .dropdown-menu .active a, +.nav .dropdown-menu .active a { + color: #ffffff; +} +.topbar div > ul .menu-dropdown .divider, +.nav .menu-dropdown .divider, +.topbar div > ul .dropdown-menu .divider, +.nav .dropdown-menu .divider { + background-color: #222; + border-color: #444; +} +.topbar ul .menu-dropdown li a, .topbar ul .dropdown-menu li a { + 
padding: 4px 15px;
+}
+li.menu, .dropdown {
+  position: relative;
+}
+a.menu:after, .dropdown-toggle:after {
+  width: 0;
+  height: 0;
+  display: inline-block;
+  content: "↓";
+  text-indent: -99999px;
+  vertical-align: top;
+  margin-top: 8px;
+  margin-left: 4px;
+  border-left: 4px solid transparent;
+  border-right: 4px solid transparent;
+  border-top: 4px solid #ffffff;
+  filter: alpha(opacity=50);
+  -khtml-opacity: 0.5;
+  -moz-opacity: 0.5;
+  opacity: 0.5;
+}
+.menu-dropdown, .dropdown-menu {
+  background-color: #ffffff;
+  float: left;
+  display: none;
+  position: absolute;
+  top: 40px;
+  z-index: 900;
+  min-width: 160px;
+  max-width: 220px;
+  _width: 160px;
+  margin-left: 0;
+  margin-right: 0;
+  padding: 6px 0;
+  zoom: 1;
+  border-color: #999;
+  border-color: rgba(0, 0, 0, 0.2);
+  border-style: solid;
+  border-width: 0 1px 1px;
+  -webkit-border-radius: 0 0 6px 6px;
+  -moz-border-radius: 0 0 6px 6px;
+  border-radius: 0 0 6px 6px;
+  -webkit-box-shadow: 0 2px 4px rgba(0, 0, 0, 0.2);
+  -moz-box-shadow: 0 2px 4px rgba(0, 0, 0, 0.2);
+  box-shadow: 0 2px 4px rgba(0, 0, 0, 0.2);
+  -webkit-background-clip: padding-box;
+  -moz-background-clip: padding-box;
+  background-clip: padding-box;
+}
+.menu-dropdown li, .dropdown-menu li {
+  float: none;
+  display: block;
+  /* FIX: upstream had `background-color: none;` — `none` is not a valid
+     <color> value and the declaration is dropped; `transparent` is the
+     keyword that expresses the evident intent. */
+  background-color: transparent;
+}
+.menu-dropdown .divider, .dropdown-menu .divider {
+  height: 1px;
+  margin: 5px 0;
+  overflow: hidden;
+  background-color: #eee;
+  border-bottom: 1px solid #ffffff;
+}
+.topbar .dropdown-menu a, .dropdown-menu a {
+  display: block;
+  padding: 4px 15px;
+  clear: both;
+  font-weight: normal;
+  line-height: 18px;
+  color: #808080;
+  text-shadow: 0 1px 0 #ffffff;
+}
+.topbar .dropdown-menu a:hover,
+.dropdown-menu a:hover,
+.topbar .dropdown-menu a.hover,
+.dropdown-menu a.hover {
+  background-color: #dddddd;
+  background-repeat: repeat-x;
+  background-image: -khtml-gradient(linear, left top, left bottom, from(#eeeeee), to(#dddddd));
+  background-image: -moz-linear-gradient(top, #eeeeee,
#dddddd); + background-image: -ms-linear-gradient(top, #eeeeee, #dddddd); + background-image: -webkit-gradient(linear, left top, left bottom, color-stop(0%, #eeeeee), color-stop(100%, #dddddd)); + background-image: -webkit-linear-gradient(top, #eeeeee, #dddddd); + background-image: -o-linear-gradient(top, #eeeeee, #dddddd); + background-image: linear-gradient(top, #eeeeee, #dddddd); + filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#eeeeee', endColorstr='#dddddd', GradientType=0); + color: #404040; + text-decoration: none; + -webkit-box-shadow: inset 0 1px 0 rgba(0, 0, 0, 0.025), inset 0 -1px rgba(0, 0, 0, 0.025); + -moz-box-shadow: inset 0 1px 0 rgba(0, 0, 0, 0.025), inset 0 -1px rgba(0, 0, 0, 0.025); + box-shadow: inset 0 1px 0 rgba(0, 0, 0, 0.025), inset 0 -1px rgba(0, 0, 0, 0.025); +} +.open .menu, +.dropdown.open .menu, +.open .dropdown-toggle, +.dropdown.open .dropdown-toggle { + color: #ffffff; + background: #ccc; + background: rgba(0, 0, 0, 0.3); +} +.open .menu-dropdown, +.dropdown.open .menu-dropdown, +.open .dropdown-menu, +.dropdown.open .dropdown-menu { + display: block; +} +.tabs, .pills { + margin: 0 0 18px; + padding: 0; + list-style: none; + zoom: 1; +} +.tabs:before, +.pills:before, +.tabs:after, +.pills:after { + display: table; + content: ""; + zoom: 1; +} +.tabs:after, .pills:after { + clear: both; +} +.tabs > li, .pills > li { + float: left; +} +.tabs > li > a, .pills > li > a { + display: block; +} +.tabs { + border-color: #ddd; + border-style: solid; + border-width: 0 0 1px; +} +.tabs > li { + position: relative; + margin-bottom: -1px; +} +.tabs > li > a { + padding: 0 15px; + margin-right: 2px; + line-height: 34px; + border: 1px solid transparent; + -webkit-border-radius: 4px 4px 0 0; + -moz-border-radius: 4px 4px 0 0; + border-radius: 4px 4px 0 0; +} +.tabs > li > a:hover { + text-decoration: none; + background-color: #eee; + border-color: #eee #eee #ddd; +} +.tabs .active > a, .tabs .active > a:hover { + color: #808080; 
+ background-color: #ffffff; + border: 1px solid #ddd; + border-bottom-color: transparent; + cursor: default; +} +.tabs .menu-dropdown, .tabs .dropdown-menu { + top: 35px; + border-width: 1px; + -webkit-border-radius: 0 6px 6px 6px; + -moz-border-radius: 0 6px 6px 6px; + border-radius: 0 6px 6px 6px; +} +.tabs a.menu:after, .tabs .dropdown-toggle:after { + border-top-color: #999; + margin-top: 15px; + margin-left: 5px; +} +.tabs li.open.menu .menu, .tabs .open.dropdown .dropdown-toggle { + border-color: #999; +} +.tabs li.open a.menu:after, .tabs .dropdown.open .dropdown-toggle:after { + border-top-color: #555; +} +.pills a { + margin: 5px 3px 5px 0; + padding: 0 15px; + line-height: 30px; + text-shadow: 0 1px 1px #ffffff; + -webkit-border-radius: 15px; + -moz-border-radius: 15px; + border-radius: 15px; +} +.pills a:hover { + color: #ffffff; + text-decoration: none; + text-shadow: 0 1px 1px rgba(0, 0, 0, 0.25); + background-color: #00438a; +} +.pills .active a { + color: #ffffff; + text-shadow: 0 1px 1px rgba(0, 0, 0, 0.25); + background-color: #0069d6; +} +.pills-vertical > li { + float: none; +} +.tab-content > .tab-pane, +.pill-content > .pill-pane, +.tab-content > div, +.pill-content > div { + display: none; +} +.tab-content > .active, .pill-content > .active { + display: block; +} +.breadcrumb { + padding: 7px 14px; + margin: 0 0 18px; + background-color: #f5f5f5; + background-repeat: repeat-x; + background-image: -khtml-gradient(linear, left top, left bottom, from(#ffffff), to(#f5f5f5)); + background-image: -moz-linear-gradient(top, #ffffff, #f5f5f5); + background-image: -ms-linear-gradient(top, #ffffff, #f5f5f5); + background-image: -webkit-gradient(linear, left top, left bottom, color-stop(0%, #ffffff), color-stop(100%, #f5f5f5)); + background-image: -webkit-linear-gradient(top, #ffffff, #f5f5f5); + background-image: -o-linear-gradient(top, #ffffff, #f5f5f5); + background-image: linear-gradient(top, #ffffff, #f5f5f5); + filter: 
progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffffff', endColorstr='#f5f5f5', GradientType=0); + border: 1px solid #ddd; + -webkit-border-radius: 3px; + -moz-border-radius: 3px; + border-radius: 3px; + -webkit-box-shadow: inset 0 1px 0 #ffffff; + -moz-box-shadow: inset 0 1px 0 #ffffff; + box-shadow: inset 0 1px 0 #ffffff; +} +.breadcrumb li { + display: inline; + text-shadow: 0 1px 0 #ffffff; +} +.breadcrumb .divider { + padding: 0 5px; + color: #bfbfbf; +} +.breadcrumb .active a { + color: #404040; +} +.hero-unit { + background-color: #f5f5f5; + margin-bottom: 30px; + padding: 60px; + -webkit-border-radius: 6px; + -moz-border-radius: 6px; + border-radius: 6px; +} +.hero-unit h1 { + margin-bottom: 0; + font-size: 60px; + line-height: 1; + letter-spacing: -1px; +} +.hero-unit p { + font-size: 18px; + font-weight: 200; + line-height: 27px; +} +footer { + margin-top: 17px; + padding-top: 17px; + border-top: 1px solid #eee; +} +.page-header { + margin-bottom: 17px; + border-bottom: 1px solid #ddd; + -webkit-box-shadow: 0 1px 0 rgba(255, 255, 255, 0.5); + -moz-box-shadow: 0 1px 0 rgba(255, 255, 255, 0.5); + box-shadow: 0 1px 0 rgba(255, 255, 255, 0.5); +} +.page-header h1 { + margin-bottom: 8px; +} +.btn.danger, +.alert-message.danger, +.btn.danger:hover, +.alert-message.danger:hover, +.btn.error, +.alert-message.error, +.btn.error:hover, +.alert-message.error:hover, +.btn.success, +.alert-message.success, +.btn.success:hover, +.alert-message.success:hover, +.btn.info, +.alert-message.info, +.btn.info:hover, +.alert-message.info:hover { + color: #ffffff; +} +.btn .close, .alert-message .close { + font-family: Arial, sans-serif; + line-height: 18px; +} +.btn.danger, +.alert-message.danger, +.btn.error, +.alert-message.error { + background-color: #c43c35; + background-repeat: repeat-x; + background-image: -khtml-gradient(linear, left top, left bottom, from(#ee5f5b), to(#c43c35)); + background-image: -moz-linear-gradient(top, #ee5f5b, #c43c35); + 
background-image: -ms-linear-gradient(top, #ee5f5b, #c43c35); + background-image: -webkit-gradient(linear, left top, left bottom, color-stop(0%, #ee5f5b), color-stop(100%, #c43c35)); + background-image: -webkit-linear-gradient(top, #ee5f5b, #c43c35); + background-image: -o-linear-gradient(top, #ee5f5b, #c43c35); + background-image: linear-gradient(top, #ee5f5b, #c43c35); + filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ee5f5b', endColorstr='#c43c35', GradientType=0); + text-shadow: 0 -1px 0 rgba(0, 0, 0, 0.25); + border-color: #c43c35 #c43c35 #882a25; + border-color: rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25); +} +.btn.success, .alert-message.success { + background-color: #57a957; + background-repeat: repeat-x; + background-image: -khtml-gradient(linear, left top, left bottom, from(#62c462), to(#57a957)); + background-image: -moz-linear-gradient(top, #62c462, #57a957); + background-image: -ms-linear-gradient(top, #62c462, #57a957); + background-image: -webkit-gradient(linear, left top, left bottom, color-stop(0%, #62c462), color-stop(100%, #57a957)); + background-image: -webkit-linear-gradient(top, #62c462, #57a957); + background-image: -o-linear-gradient(top, #62c462, #57a957); + background-image: linear-gradient(top, #62c462, #57a957); + filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#62c462', endColorstr='#57a957', GradientType=0); + text-shadow: 0 -1px 0 rgba(0, 0, 0, 0.25); + border-color: #57a957 #57a957 #3d773d; + border-color: rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25); +} +.btn.info, .alert-message.info { + background-color: #339bb9; + background-repeat: repeat-x; + background-image: -khtml-gradient(linear, left top, left bottom, from(#5bc0de), to(#339bb9)); + background-image: -moz-linear-gradient(top, #5bc0de, #339bb9); + background-image: -ms-linear-gradient(top, #5bc0de, #339bb9); + background-image: -webkit-gradient(linear, left top, left bottom, color-stop(0%, #5bc0de), 
color-stop(100%, #339bb9)); + background-image: -webkit-linear-gradient(top, #5bc0de, #339bb9); + background-image: -o-linear-gradient(top, #5bc0de, #339bb9); + background-image: linear-gradient(top, #5bc0de, #339bb9); + filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#5bc0de', endColorstr='#339bb9', GradientType=0); + text-shadow: 0 -1px 0 rgba(0, 0, 0, 0.25); + border-color: #339bb9 #339bb9 #22697d; + border-color: rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25); +} +.btn { + cursor: pointer; + display: inline-block; + background-color: #e6e6e6; + background-repeat: no-repeat; + background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#ffffff), color-stop(25%, #ffffff), to(#e6e6e6)); + background-image: -webkit-linear-gradient(#ffffff, #ffffff 25%, #e6e6e6); + background-image: -moz-linear-gradient(top, #ffffff, #ffffff 25%, #e6e6e6); + background-image: -ms-linear-gradient(#ffffff, #ffffff 25%, #e6e6e6); + background-image: -o-linear-gradient(#ffffff, #ffffff 25%, #e6e6e6); + background-image: linear-gradient(#ffffff, #ffffff 25%, #e6e6e6); + filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffffff', endColorstr='#e6e6e6', GradientType=0); + padding: 5px 14px 6px; + text-shadow: 0 1px 1px rgba(255, 255, 255, 0.75); + color: #333; + font-size: 13px; + line-height: normal; + border: 1px solid #ccc; + border-bottom-color: #bbb; + -webkit-border-radius: 4px; + -moz-border-radius: 4px; + border-radius: 4px; + -webkit-box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.2), 0 1px 2px rgba(0, 0, 0, 0.05); + -moz-box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.2), 0 1px 2px rgba(0, 0, 0, 0.05); + box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.2), 0 1px 2px rgba(0, 0, 0, 0.05); + -webkit-transition: 0.1s linear all; + -moz-transition: 0.1s linear all; + -ms-transition: 0.1s linear all; + -o-transition: 0.1s linear all; + transition: 0.1s linear all; +} +.btn:hover { + background-position: 0 -15px; + color: #333; + 
text-decoration: none; +} +.btn:focus { + outline: 1px dotted #666; +} +.btn.primary { + color: #ffffff; + background-color: #0064cd; + background-repeat: repeat-x; + background-image: -khtml-gradient(linear, left top, left bottom, from(#049cdb), to(#0064cd)); + background-image: -moz-linear-gradient(top, #049cdb, #0064cd); + background-image: -ms-linear-gradient(top, #049cdb, #0064cd); + background-image: -webkit-gradient(linear, left top, left bottom, color-stop(0%, #049cdb), color-stop(100%, #0064cd)); + background-image: -webkit-linear-gradient(top, #049cdb, #0064cd); + background-image: -o-linear-gradient(top, #049cdb, #0064cd); + background-image: linear-gradient(top, #049cdb, #0064cd); + filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#049cdb', endColorstr='#0064cd', GradientType=0); + text-shadow: 0 -1px 0 rgba(0, 0, 0, 0.25); + border-color: #0064cd #0064cd #003f81; + border-color: rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25); +} +.btn.active, .btn :active { + -webkit-box-shadow: inset 0 2px 4px rgba(0, 0, 0, 0.25), 0 1px 2px rgba(0, 0, 0, 0.05); + -moz-box-shadow: inset 0 2px 4px rgba(0, 0, 0, 0.25), 0 1px 2px rgba(0, 0, 0, 0.05); + box-shadow: inset 0 2px 4px rgba(0, 0, 0, 0.25), 0 1px 2px rgba(0, 0, 0, 0.05); +} +.btn.disabled { + cursor: default; + background-image: none; + filter: progid:DXImageTransform.Microsoft.gradient(enabled = false); + filter: alpha(opacity=65); + -khtml-opacity: 0.65; + -moz-opacity: 0.65; + opacity: 0.65; + -webkit-box-shadow: none; + -moz-box-shadow: none; + box-shadow: none; +} +.btn[disabled] { + cursor: default; + background-image: none; + filter: progid:DXImageTransform.Microsoft.gradient(enabled = false); + filter: alpha(opacity=65); + -khtml-opacity: 0.65; + -moz-opacity: 0.65; + opacity: 0.65; + -webkit-box-shadow: none; + -moz-box-shadow: none; + box-shadow: none; +} +.btn.large { + font-size: 15px; + line-height: normal; + padding: 9px 14px 9px; + -webkit-border-radius: 6px; + 
-moz-border-radius: 6px; + border-radius: 6px; +} +.btn.small { + padding: 7px 9px 7px; + font-size: 11px; +} +:root .alert-message, :root .btn { + border-radius: 0 \0; +} +button.btn::-moz-focus-inner, input[type=submit].btn::-moz-focus-inner { + padding: 0; + border: 0; +} +.close { + float: right; + color: #000000; + font-size: 20px; + font-weight: bold; + line-height: 13.5px; + text-shadow: 0 1px 0 #ffffff; + filter: alpha(opacity=25); + -khtml-opacity: 0.25; + -moz-opacity: 0.25; + opacity: 0.25; +} +.close:hover { + color: #000000; + text-decoration: none; + filter: alpha(opacity=40); + -khtml-opacity: 0.4; + -moz-opacity: 0.4; + opacity: 0.4; +} +.alert-message { + position: relative; + padding: 7px 15px; + margin-bottom: 18px; + color: #404040; + background-color: #eedc94; + background-repeat: repeat-x; + background-image: -khtml-gradient(linear, left top, left bottom, from(#fceec1), to(#eedc94)); + background-image: -moz-linear-gradient(top, #fceec1, #eedc94); + background-image: -ms-linear-gradient(top, #fceec1, #eedc94); + background-image: -webkit-gradient(linear, left top, left bottom, color-stop(0%, #fceec1), color-stop(100%, #eedc94)); + background-image: -webkit-linear-gradient(top, #fceec1, #eedc94); + background-image: -o-linear-gradient(top, #fceec1, #eedc94); + background-image: linear-gradient(top, #fceec1, #eedc94); + filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#fceec1', endColorstr='#eedc94', GradientType=0); + text-shadow: 0 -1px 0 rgba(0, 0, 0, 0.25); + border-color: #eedc94 #eedc94 #e4c652; + border-color: rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25); + text-shadow: 0 1px 0 rgba(255, 255, 255, 0.5); + border-width: 1px; + border-style: solid; + -webkit-border-radius: 4px; + -moz-border-radius: 4px; + border-radius: 4px; + -webkit-box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.25); + -moz-box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.25); + box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.25); +} 
+.alert-message .close { + margin-top: 1px; + *margin-top: 0; +} +.alert-message a { + font-weight: bold; + color: #404040; +} +.alert-message.danger p a, +.alert-message.error p a, +.alert-message.success p a, +.alert-message.info p a { + color: #ffffff; +} +.alert-message h5 { + line-height: 18px; +} +.alert-message p { + margin-bottom: 0; +} +.alert-message div { + margin-top: 5px; + margin-bottom: 2px; + line-height: 28px; +} +.alert-message .btn { + -webkit-box-shadow: 0 1px 0 rgba(255, 255, 255, 0.25); + -moz-box-shadow: 0 1px 0 rgba(255, 255, 255, 0.25); + box-shadow: 0 1px 0 rgba(255, 255, 255, 0.25); +} +.alert-message.block-message { + background-image: none; + background-color: #fdf5d9; + filter: progid:DXImageTransform.Microsoft.gradient(enabled = false); + padding: 14px; + border-color: #fceec1; + -webkit-box-shadow: none; + -moz-box-shadow: none; + box-shadow: none; +} +.alert-message.block-message ul, .alert-message.block-message p { + margin-right: 30px; +} +.alert-message.block-message ul { + margin-bottom: 0; +} +.alert-message.block-message li { + color: #404040; +} +.alert-message.block-message .alert-actions { + margin-top: 5px; +} +.alert-message.block-message.error, .alert-message.block-message.success, .alert-message.block-message.info { + color: #404040; + text-shadow: 0 1px 0 rgba(255, 255, 255, 0.5); +} +.alert-message.block-message.error { + background-color: #fddfde; + border-color: #fbc7c6; +} +.alert-message.block-message.success { + background-color: #d1eed1; + border-color: #bfe7bf; +} +.alert-message.block-message.info { + background-color: #ddf4fb; + border-color: #c6edf9; +} +.alert-message.block-message.danger p a, +.alert-message.block-message.error p a, +.alert-message.block-message.success p a, +.alert-message.block-message.info p a { + color: #404040; +} +.pagination { + height: 36px; + margin: 18px 0; +} +.pagination ul { + float: left; + margin: 0; + border: 1px solid #ddd; + border: 1px solid rgba(0, 0, 0, 0.15); + 
-webkit-border-radius: 3px; + -moz-border-radius: 3px; + border-radius: 3px; + -webkit-box-shadow: 0 1px 2px rgba(0, 0, 0, 0.05); + -moz-box-shadow: 0 1px 2px rgba(0, 0, 0, 0.05); + box-shadow: 0 1px 2px rgba(0, 0, 0, 0.05); +} +.pagination li { + display: inline; +} +.pagination a { + float: left; + padding: 0 14px; + line-height: 34px; + border-right: 1px solid; + border-right-color: #ddd; + border-right-color: rgba(0, 0, 0, 0.15); + *border-right-color: #ddd; + /* IE6-7 */ + + text-decoration: none; +} +.pagination a:hover, .pagination .active a { + background-color: #c7eefe; +} +.pagination .disabled a, .pagination .disabled a:hover { + background-color: transparent; + color: #bfbfbf; +} +.pagination .next a { + border: 0; +} +.well { + background-color: #f5f5f5; + margin-bottom: 20px; + padding: 19px; + min-height: 20px; + border: 1px solid #eee; + border: 1px solid rgba(0, 0, 0, 0.05); + -webkit-border-radius: 4px; + -moz-border-radius: 4px; + border-radius: 4px; + -webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.05); + -moz-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.05); + box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.05); +} +.well blockquote { + border-color: #ddd; + border-color: rgba(0, 0, 0, 0.15); +} +.modal-backdrop { + background-color: #000000; + position: fixed; + top: 0; + left: 0; + right: 0; + bottom: 0; + z-index: 10000; +} +.modal-backdrop.fade { + opacity: 0; +} +.modal-backdrop, .modal-backdrop.fade.in { + filter: alpha(opacity=80); + -khtml-opacity: 0.8; + -moz-opacity: 0.8; + opacity: 0.8; +} +.modal { + position: fixed; + top: 50%; + left: 50%; + z-index: 11000; + width: 560px; + margin: -250px 0 0 -280px; + background-color: #ffffff; + border: 1px solid #999; + border: 1px solid rgba(0, 0, 0, 0.3); + *border: 1px solid #999; + /* IE6-7 */ + + -webkit-border-radius: 6px; + -moz-border-radius: 6px; + border-radius: 6px; + -webkit-box-shadow: 0 3px 7px rgba(0, 0, 0, 0.3); + -moz-box-shadow: 0 3px 7px rgba(0, 0, 0, 0.3); + box-shadow: 0 
3px 7px rgba(0, 0, 0, 0.3); + -webkit-background-clip: padding-box; + -moz-background-clip: padding-box; + background-clip: padding-box; +} +.modal .close { + margin-top: 7px; +} +.modal.fade { + -webkit-transition: opacity .3s linear, top .3s ease-out; + -moz-transition: opacity .3s linear, top .3s ease-out; + -ms-transition: opacity .3s linear, top .3s ease-out; + -o-transition: opacity .3s linear, top .3s ease-out; + transition: opacity .3s linear, top .3s ease-out; + top: -25%; +} +.modal.fade.in { + top: 50%; +} +.modal-header { + border-bottom: 1px solid #eee; + padding: 5px 15px; +} +.modal-body { + padding: 15px; +} +.modal-body form { + margin-bottom: 0; +} +.modal-footer { + background-color: #f5f5f5; + padding: 14px 15px 15px; + border-top: 1px solid #ddd; + -webkit-border-radius: 0 0 6px 6px; + -moz-border-radius: 0 0 6px 6px; + border-radius: 0 0 6px 6px; + -webkit-box-shadow: inset 0 1px 0 #ffffff; + -moz-box-shadow: inset 0 1px 0 #ffffff; + box-shadow: inset 0 1px 0 #ffffff; + zoom: 1; + margin-bottom: 0; +} +.modal-footer:before, .modal-footer:after { + display: table; + content: ""; + zoom: 1; +} +.modal-footer:after { + clear: both; +} +.modal-footer .btn { + float: right; + margin-left: 5px; +} +.modal .popover, .modal .twipsy { + z-index: 12000; +} +.twipsy { + display: block; + position: absolute; + visibility: visible; + padding: 5px; + font-size: 11px; + z-index: 12000; + filter: alpha(opacity=80); + -khtml-opacity: 0.8; + -moz-opacity: 0.8; + opacity: 0.8; +} +.twipsy.fade.in { + filter: alpha(opacity=80); + -khtml-opacity: 0.8; + -moz-opacity: 0.8; + opacity: 0.8; +} +.twipsy.above .twipsy-arrow { + bottom: 0; + left: 50%; + margin-left: -5px; + border-left: 5px solid transparent; + border-right: 5px solid transparent; + border-top: 5px solid #000000; +} +.twipsy.left .twipsy-arrow { + top: 50%; + right: 0; + margin-top: -5px; + border-top: 5px solid transparent; + border-bottom: 5px solid transparent; + border-left: 5px solid #000000; +} 
+.twipsy.below .twipsy-arrow { + top: 0; + left: 50%; + margin-left: -5px; + border-left: 5px solid transparent; + border-right: 5px solid transparent; + border-bottom: 5px solid #000000; +} +.twipsy.right .twipsy-arrow { + top: 50%; + left: 0; + margin-top: -5px; + border-top: 5px solid transparent; + border-bottom: 5px solid transparent; + border-right: 5px solid #000000; +} +.twipsy-inner { + padding: 3px 8px; + background-color: #000000; + color: white; + text-align: center; + max-width: 200px; + text-decoration: none; + -webkit-border-radius: 4px; + -moz-border-radius: 4px; + border-radius: 4px; +} +.twipsy-arrow { + position: absolute; + width: 0; + height: 0; +} +.popover { + position: absolute; + top: 0; + left: 0; + z-index: 12000; + padding: 5px; + display: none; +} +.popover.above .arrow { + bottom: 0; + left: 50%; + margin-left: -5px; + border-left: 5px solid transparent; + border-right: 5px solid transparent; + border-top: 5px solid #000000; +} +.popover.right .arrow { + top: 50%; + left: 0; + margin-top: -5px; + border-top: 5px solid transparent; + border-bottom: 5px solid transparent; + border-right: 5px solid #000000; +} +.popover.below .arrow { + top: 0; + left: 50%; + margin-left: -5px; + border-left: 5px solid transparent; + border-right: 5px solid transparent; + border-bottom: 5px solid #000000; +} +.popover.left .arrow { + top: 50%; + right: 0; + margin-top: -5px; + border-top: 5px solid transparent; + border-bottom: 5px solid transparent; + border-left: 5px solid #000000; +} +.popover .arrow { + position: absolute; + width: 0; + height: 0; +} +.popover .inner { + background: #000000; + background: rgba(0, 0, 0, 0.8); + padding: 3px; + overflow: hidden; + width: 280px; + -webkit-border-radius: 6px; + -moz-border-radius: 6px; + border-radius: 6px; + -webkit-box-shadow: 0 3px 7px rgba(0, 0, 0, 0.3); + -moz-box-shadow: 0 3px 7px rgba(0, 0, 0, 0.3); + box-shadow: 0 3px 7px rgba(0, 0, 0, 0.3); +} +.popover .title { + background-color: #f5f5f5; + 
padding: 9px 15px; + line-height: 1; + -webkit-border-radius: 3px 3px 0 0; + -moz-border-radius: 3px 3px 0 0; + border-radius: 3px 3px 0 0; + border-bottom: 1px solid #eee; +} +.popover .content { + background-color: #ffffff; + padding: 14px; + -webkit-border-radius: 0 0 3px 3px; + -moz-border-radius: 0 0 3px 3px; + border-radius: 0 0 3px 3px; + -webkit-background-clip: padding-box; + -moz-background-clip: padding-box; + background-clip: padding-box; +} +.popover .content p, .popover .content ul, .popover .content ol { + margin-bottom: 0; +} +.fade { + -webkit-transition: opacity 0.15s linear; + -moz-transition: opacity 0.15s linear; + -ms-transition: opacity 0.15s linear; + -o-transition: opacity 0.15s linear; + transition: opacity 0.15s linear; + opacity: 0; +} +.fade.in { + opacity: 1; +} +.label { + padding: 1px 3px 2px; + font-size: 9.75px; + font-weight: bold; + color: #ffffff; + text-transform: uppercase; + white-space: nowrap; + background-color: #bfbfbf; + -webkit-border-radius: 3px; + -moz-border-radius: 3px; + border-radius: 3px; +} +.label.important { + background-color: #c43c35; +} +.label.warning { + background-color: #f89406; +} +.label.success { + background-color: #46a546; +} +.label.notice { + background-color: #62cffc; +} +.media-grid { + margin-left: -20px; + margin-bottom: 0; + zoom: 1; +} +.media-grid:before, .media-grid:after { + display: table; + content: ""; + zoom: 1; +} +.media-grid:after { + clear: both; +} +.media-grid li { + display: inline; +} +.media-grid a { + float: left; + padding: 4px; + margin: 0 0 18px 20px; + border: 1px solid #ddd; + -webkit-border-radius: 4px; + -moz-border-radius: 4px; + border-radius: 4px; + -webkit-box-shadow: 0 1px 1px rgba(0, 0, 0, 0.075); + -moz-box-shadow: 0 1px 1px rgba(0, 0, 0, 0.075); + box-shadow: 0 1px 1px rgba(0, 0, 0, 0.075); +} +.media-grid a img { + display: block; +} +.media-grid a:hover { + border-color: #0069d6; + -webkit-box-shadow: 0 1px 4px rgba(0, 105, 214, 0.25); + -moz-box-shadow: 0 
1px 4px rgba(0, 105, 214, 0.25); + box-shadow: 0 1px 4px rgba(0, 105, 214, 0.25); +} diff --git a/src/dashboard/src/media/vendor/twitter-bootstrap/bootstrap.min.css b/src/dashboard/src/media/vendor/twitter-bootstrap/bootstrap.min.css new file mode 100644 index 0000000000..3ed847a229 --- /dev/null +++ b/src/dashboard/src/media/vendor/twitter-bootstrap/bootstrap.min.css @@ -0,0 +1,356 @@ +html,body{margin:0;padding:0;} +h1,h2,h3,h4,h5,h6,p,blockquote,pre,a,abbr,acronym,address,cite,code,del,dfn,em,img,q,s,samp,small,strike,strong,sub,sup,tt,var,dd,dl,dt,li,ol,ul,fieldset,form,label,legend,button,table,caption,tbody,tfoot,thead,tr,th,td{margin:0;padding:0;border:0;font-weight:normal;font-style:normal;font-size:100%;line-height:1;font-family:inherit;} +table{border-collapse:collapse;border-spacing:0;} +ol,ul{list-style:none;} +q:before,q:after,blockquote:before,blockquote:after{content:"";} +html{overflow-y:scroll;font-size:100%;-webkit-text-size-adjust:100%;-ms-text-size-adjust:100%;} +a:focus{outline:thin dotted;} +a:hover,a:active{outline:0;} +article,aside,details,figcaption,figure,footer,header,hgroup,nav,section{display:block;} +audio,canvas,video{display:inline-block;*display:inline;*zoom:1;} +audio:not([controls]){display:none;} +sub,sup{font-size:75%;line-height:0;position:relative;vertical-align:baseline;} +sup{top:-0.5em;} +sub{bottom:-0.25em;} +img{border:0;-ms-interpolation-mode:bicubic;} +button,input,select,textarea{font-size:100%;margin:0;vertical-align:baseline;*vertical-align:middle;} +button,input{line-height:normal;*overflow:visible;} +button::-moz-focus-inner,input::-moz-focus-inner{border:0;padding:0;} +button,input[type="button"],input[type="reset"],input[type="submit"]{cursor:pointer;-webkit-appearance:button;} +input[type="search"]{-webkit-appearance:textfield;-webkit-box-sizing:content-box;-moz-box-sizing:content-box;box-sizing:content-box;} +input[type="search"]::-webkit-search-decoration{-webkit-appearance:none;} 
+textarea{overflow:auto;vertical-align:top;} +body{background-color:#ffffff;margin:0;font-family:"Helvetica Neue",Helvetica,Arial,sans-serif;font-size:13px;font-weight:normal;line-height:18px;color:#404040;} +.container{width:940px;margin-left:auto;margin-right:auto;zoom:1;}.container:before,.container:after{display:table;content:"";zoom:1;} +.container:after{clear:both;} +.container-fluid{position:relative;min-width:940px;padding-left:20px;padding-right:20px;zoom:1;}.container-fluid:before,.container-fluid:after{display:table;content:"";zoom:1;} +.container-fluid:after{clear:both;} +.container-fluid>.sidebar{position:absolute;top:0;left:20px;width:220px;} +.container-fluid>.content{margin-left:240px;} +a{color:#0069d6;text-decoration:none;line-height:inherit;font-weight:inherit;}a:hover{color:#00438a;text-decoration:underline;} +.pull-right{float:right;} +.pull-left{float:left;} +.hide{display:none;} +.show{display:block;} +.row{zoom:1;margin-left:-20px;}.row:before,.row:after{display:table;content:"";zoom:1;} +.row:after{clear:both;} +.row>[class*="span"]{display:inline;float:left;margin-left:20px;} +.span1{width:40px;} +.span2{width:100px;} +.span3{width:160px;} +.span4{width:220px;} +.span5{width:280px;} +.span6{width:340px;} +.span7{width:400px;} +.span8{width:460px;} +.span9{width:520px;} +.span10{width:580px;} +.span11{width:640px;} +.span12{width:700px;} +.span13{width:760px;} +.span14{width:820px;} +.span15{width:880px;} +.span16{width:940px;} +.span17{width:1000px;} +.span18{width:1060px;} +.span19{width:1120px;} +.span20{width:1180px;} +.span21{width:1240px;} +.span22{width:1300px;} +.span23{width:1360px;} +.span24{width:1420px;} +.row>.offset1{margin-left:80px;} +.row>.offset2{margin-left:140px;} +.row>.offset3{margin-left:200px;} +.row>.offset4{margin-left:260px;} +.row>.offset5{margin-left:320px;} +.row>.offset6{margin-left:380px;} +.row>.offset7{margin-left:440px;} +.row>.offset8{margin-left:500px;} +.row>.offset9{margin-left:560px;} 
+.row>.offset10{margin-left:620px;} +.row>.offset11{margin-left:680px;} +.row>.offset12{margin-left:740px;} +.span-one-third{width:300px;} +.span-two-thirds{width:620px;} +.offset-one-third{margin-left:340px;} +.offset-two-thirds{margin-left:660px;} +p{font-size:13px;font-weight:normal;line-height:18px;margin-bottom:9px;}p small{font-size:11px;color:#bfbfbf;} +h1,h2,h3,h4,h5,h6{font-weight:bold;color:#404040;}h1 small,h2 small,h3 small,h4 small,h5 small,h6 small{color:#bfbfbf;} +h1{margin-bottom:18px;font-size:30px;line-height:36px;}h1 small{font-size:18px;} +h2{font-size:24px;line-height:36px;}h2 small{font-size:14px;} +h3,h4,h5,h6{line-height:36px;} +h3{font-size:18px;}h3 small{font-size:14px;} +h4{font-size:16px;}h4 small{font-size:12px;} +h5{font-size:14px;} +h6{font-size:13px;color:#bfbfbf;text-transform:uppercase;} +ul,ol{margin:0 0 18px 25px;} +ul ul,ul ol,ol ol,ol ul{margin-bottom:0;} +ul{list-style:disc;} +ol{list-style:decimal;} +li{line-height:18px;color:#808080;} +ul.unstyled{list-style:none;margin-left:0;} +dl{margin-bottom:18px;}dl dt,dl dd{line-height:18px;} +dl dt{font-weight:bold;} +dl dd{margin-left:9px;} +hr{margin:20px 0 19px;border:0;border-bottom:1px solid #eee;} +strong{font-style:inherit;font-weight:bold;} +em{font-style:italic;font-weight:inherit;line-height:inherit;} +.muted{color:#bfbfbf;} +blockquote{margin-bottom:18px;border-left:5px solid #eee;padding-left:15px;}blockquote p{font-size:14px;font-weight:300;line-height:18px;margin-bottom:0;} +blockquote small{display:block;font-size:12px;font-weight:300;line-height:18px;color:#bfbfbf;}blockquote small:before{content:'\2014 \00A0';} +address{display:block;line-height:18px;margin-bottom:18px;} +code,pre{padding:0 3px 2px;font-family:Monaco, Andale Mono, Courier New, monospace;font-size:12px;-webkit-border-radius:3px;-moz-border-radius:3px;border-radius:3px;} +code{background-color:#fee9cc;color:rgba(0, 0, 0, 0.75);padding:1px 3px;} 
+pre{background-color:#f5f5f5;display:block;padding:8.5px;margin:0 0 18px;line-height:18px;font-size:12px;border:1px solid #ccc;border:1px solid rgba(0, 0, 0, 0.15);-webkit-border-radius:3px;-moz-border-radius:3px;border-radius:3px;white-space:pre;white-space:pre-wrap;word-wrap:break-word;} +form{margin-bottom:18px;} +fieldset{margin-bottom:18px;padding-top:18px;}fieldset legend{display:block;padding-left:150px;font-size:19.5px;line-height:1;color:#404040;*padding:0 0 5px 145px;*line-height:1.5;} +form .clearfix{margin-bottom:18px;zoom:1;}form .clearfix:before,form .clearfix:after{display:table;content:"";zoom:1;} +form .clearfix:after{clear:both;} +label,input,select,textarea{font-family:"Helvetica Neue",Helvetica,Arial,sans-serif;font-size:13px;font-weight:normal;line-height:normal;} +label{padding-top:6px;font-size:13px;line-height:18px;float:left;width:130px;text-align:right;color:#404040;} +form .input{margin-left:150px;} +input[type=checkbox],input[type=radio]{cursor:pointer;} +input,textarea,select,.uneditable-input{display:inline-block;width:210px;height:18px;padding:4px;font-size:13px;line-height:18px;color:#808080;border:1px solid #ccc;-webkit-border-radius:3px;-moz-border-radius:3px;border-radius:3px;} +select{padding:initial;} +input[type=checkbox],input[type=radio]{width:auto;height:auto;padding:0;margin:3px 0;*margin-top:0;line-height:normal;border:none;} +input[type=file]{background-color:#ffffff;padding:initial;border:initial;line-height:initial;-webkit-box-shadow:none;-moz-box-shadow:none;box-shadow:none;} +input[type=button],input[type=reset],input[type=submit]{width:auto;height:auto;} +select,input[type=file]{height:27px;*height:auto;line-height:27px;*margin-top:4px;} +select[multiple]{height:inherit;background-color:#ffffff;} +textarea{height:auto;} +.uneditable-input{background-color:#ffffff;display:block;border-color:#eee;-webkit-box-shadow:inset 0 1px 2px rgba(0, 0, 0, 0.025);-moz-box-shadow:inset 0 1px 2px rgba(0, 0, 0, 
0.025);box-shadow:inset 0 1px 2px rgba(0, 0, 0, 0.025);cursor:not-allowed;} +:-moz-placeholder{color:#bfbfbf;} +::-webkit-input-placeholder{color:#bfbfbf;} +input,textarea{-webkit-transition:border linear 0.2s,box-shadow linear 0.2s;-moz-transition:border linear 0.2s,box-shadow linear 0.2s;-ms-transition:border linear 0.2s,box-shadow linear 0.2s;-o-transition:border linear 0.2s,box-shadow linear 0.2s;transition:border linear 0.2s,box-shadow linear 0.2s;-webkit-box-shadow:inset 0 1px 3px rgba(0, 0, 0, 0.1);-moz-box-shadow:inset 0 1px 3px rgba(0, 0, 0, 0.1);box-shadow:inset 0 1px 3px rgba(0, 0, 0, 0.1);} +input:focus,textarea:focus{outline:0;border-color:rgba(82, 168, 236, 0.8);-webkit-box-shadow:inset 0 1px 3px rgba(0, 0, 0, 0.1),0 0 8px rgba(82, 168, 236, 0.6);-moz-box-shadow:inset 0 1px 3px rgba(0, 0, 0, 0.1),0 0 8px rgba(82, 168, 236, 0.6);box-shadow:inset 0 1px 3px rgba(0, 0, 0, 0.1),0 0 8px rgba(82, 168, 236, 0.6);} +input[type=file]:focus,input[type=checkbox]:focus,select:focus{-webkit-box-shadow:none;-moz-box-shadow:none;box-shadow:none;outline:1px dotted #666;} +form .clearfix.error>label,form .clearfix.error .help-block,form .clearfix.error .help-inline{color:#b94a48;} +form .clearfix.error input,form .clearfix.error textarea{color:#b94a48;border-color:#ee5f5b;}form .clearfix.error input:focus,form .clearfix.error textarea:focus{border-color:#e9322d;-webkit-box-shadow:0 0 6px #f8b9b7;-moz-box-shadow:0 0 6px #f8b9b7;box-shadow:0 0 6px #f8b9b7;} +form .clearfix.error .input-prepend .add-on,form .clearfix.error .input-append .add-on{color:#b94a48;background-color:#fce6e6;border-color:#b94a48;} +form .clearfix.warning>label,form .clearfix.warning .help-block,form .clearfix.warning .help-inline{color:#c09853;} +form .clearfix.warning input,form .clearfix.warning textarea{color:#c09853;border-color:#ccae64;}form .clearfix.warning input:focus,form .clearfix.warning textarea:focus{border-color:#be9a3f;-webkit-box-shadow:0 0 6px #e5d6b1;-moz-box-shadow:0 0 6px 
#e5d6b1;box-shadow:0 0 6px #e5d6b1;} +form .clearfix.warning .input-prepend .add-on,form .clearfix.warning .input-append .add-on{color:#c09853;background-color:#d2b877;border-color:#c09853;} +form .clearfix.success>label,form .clearfix.success .help-block,form .clearfix.success .help-inline{color:#468847;} +form .clearfix.success input,form .clearfix.success textarea{color:#468847;border-color:#57a957;}form .clearfix.success input:focus,form .clearfix.success textarea:focus{border-color:#458845;-webkit-box-shadow:0 0 6px #9acc9a;-moz-box-shadow:0 0 6px #9acc9a;box-shadow:0 0 6px #9acc9a;} +form .clearfix.success .input-prepend .add-on,form .clearfix.success .input-append .add-on{color:#468847;background-color:#bcddbc;border-color:#468847;} +.input-mini,input.mini,textarea.mini,select.mini{width:60px;} +.input-small,input.small,textarea.small,select.small{width:90px;} +.input-medium,input.medium,textarea.medium,select.medium{width:150px;} +.input-large,input.large,textarea.large,select.large{width:210px;} +.input-xlarge,input.xlarge,textarea.xlarge,select.xlarge{width:270px;} +.input-xxlarge,input.xxlarge,textarea.xxlarge,select.xxlarge{width:530px;} +textarea.xxlarge{overflow-y:auto;} +input.span1,textarea.span1{display:inline-block;float:none;width:30px;margin-left:0;} +input.span2,textarea.span2{display:inline-block;float:none;width:90px;margin-left:0;} +input.span3,textarea.span3{display:inline-block;float:none;width:150px;margin-left:0;} +input.span4,textarea.span4{display:inline-block;float:none;width:210px;margin-left:0;} +input.span5,textarea.span5{display:inline-block;float:none;width:270px;margin-left:0;} +input.span6,textarea.span6{display:inline-block;float:none;width:330px;margin-left:0;} +input.span7,textarea.span7{display:inline-block;float:none;width:390px;margin-left:0;} +input.span8,textarea.span8{display:inline-block;float:none;width:450px;margin-left:0;} +input.span9,textarea.span9{display:inline-block;float:none;width:510px;margin-left:0;} 
+input.span10,textarea.span10{display:inline-block;float:none;width:570px;margin-left:0;} +input.span11,textarea.span11{display:inline-block;float:none;width:630px;margin-left:0;} +input.span12,textarea.span12{display:inline-block;float:none;width:690px;margin-left:0;} +input.span13,textarea.span13{display:inline-block;float:none;width:750px;margin-left:0;} +input.span14,textarea.span14{display:inline-block;float:none;width:810px;margin-left:0;} +input.span15,textarea.span15{display:inline-block;float:none;width:870px;margin-left:0;} +input.span16,textarea.span16{display:inline-block;float:none;width:930px;margin-left:0;} +input[disabled],select[disabled],textarea[disabled],input[readonly],select[readonly],textarea[readonly]{background-color:#f5f5f5;border-color:#ddd;cursor:not-allowed;} +.actions{background:#f5f5f5;margin-top:18px;margin-bottom:18px;padding:17px 20px 18px 150px;border-top:1px solid #ddd;-webkit-border-radius:0 0 3px 3px;-moz-border-radius:0 0 3px 3px;border-radius:0 0 3px 3px;}.actions .secondary-action{float:right;}.actions .secondary-action a{line-height:30px;}.actions .secondary-action a:hover{text-decoration:underline;} +.help-inline,.help-block{font-size:13px;line-height:18px;color:#bfbfbf;} +.help-inline{padding-left:5px;*position:relative;*top:-5px;} +.help-block{display:block;max-width:600px;} +.inline-inputs{color:#808080;}.inline-inputs span{padding:0 2px 0 1px;} +.input-prepend input,.input-append input{-webkit-border-radius:0 3px 3px 0;-moz-border-radius:0 3px 3px 0;border-radius:0 3px 3px 0;} +.input-prepend .add-on,.input-append .add-on{position:relative;background:#f5f5f5;border:1px solid #ccc;z-index:2;float:left;display:block;width:auto;min-width:16px;height:18px;padding:4px 4px 4px 5px;margin-right:-1px;font-weight:normal;line-height:18px;color:#bfbfbf;text-align:center;text-shadow:0 1px 0 #ffffff;-webkit-border-radius:3px 0 0 3px;-moz-border-radius:3px 0 0 3px;border-radius:3px 0 0 3px;} +.input-prepend .active,.input-append 
.active{background:#a9dba9;border-color:#46a546;} +.input-prepend .add-on{*margin-top:1px;} +.input-append input{float:left;-webkit-border-radius:3px 0 0 3px;-moz-border-radius:3px 0 0 3px;border-radius:3px 0 0 3px;} +.input-append .add-on{-webkit-border-radius:0 3px 3px 0;-moz-border-radius:0 3px 3px 0;border-radius:0 3px 3px 0;margin-right:0;margin-left:-1px;} +.inputs-list{margin:0 0 5px;width:100%;}.inputs-list li{display:block;padding:0;width:100%;} +.inputs-list label{display:block;float:none;width:auto;padding:0;margin-left:20px;line-height:18px;text-align:left;white-space:normal;}.inputs-list label strong{color:#808080;} +.inputs-list label small{font-size:11px;font-weight:normal;} +.inputs-list .inputs-list{margin-left:25px;margin-bottom:10px;padding-top:0;} +.inputs-list:first-child{padding-top:6px;} +.inputs-list li+li{padding-top:2px;} +.inputs-list input[type=radio],.inputs-list input[type=checkbox]{margin-bottom:0;margin-left:-20px;float:left;} +.form-stacked{padding-left:20px;}.form-stacked fieldset{padding-top:9px;} +.form-stacked legend{padding-left:0;} +.form-stacked label{display:block;float:none;width:auto;font-weight:bold;text-align:left;line-height:20px;padding-top:0;} +.form-stacked .clearfix{margin-bottom:9px;}.form-stacked .clearfix div.input{margin-left:0;} +.form-stacked .inputs-list{margin-bottom:0;}.form-stacked .inputs-list li{padding-top:0;}.form-stacked .inputs-list li label{font-weight:normal;padding-top:0;} +.form-stacked div.clearfix.error{padding-top:10px;padding-bottom:10px;padding-left:10px;margin-top:0;margin-left:-10px;} +.form-stacked .actions{margin-left:-20px;padding-left:20px;} +table{width:100%;margin-bottom:18px;padding:0;font-size:13px;border-collapse:collapse;}table th,table td{padding:10px 10px 9px;line-height:18px;text-align:left;} +table th{padding-top:9px;font-weight:bold;vertical-align:middle;} +table td{vertical-align:top;border-top:1px solid #ddd;} +table tbody th{border-top:1px solid #ddd;vertical-align:top;} 
+.condensed-table th,.condensed-table td{padding:5px 5px 4px;} +.bordered-table{border:1px solid #ddd;border-collapse:separate;*border-collapse:collapse;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;}.bordered-table th+th,.bordered-table td+td,.bordered-table th+td{border-left:1px solid #ddd;} +.bordered-table thead tr:first-child th:first-child,.bordered-table tbody tr:first-child td:first-child{-webkit-border-radius:4px 0 0 0;-moz-border-radius:4px 0 0 0;border-radius:4px 0 0 0;} +.bordered-table thead tr:first-child th:last-child,.bordered-table tbody tr:first-child td:last-child{-webkit-border-radius:0 4px 0 0;-moz-border-radius:0 4px 0 0;border-radius:0 4px 0 0;} +.bordered-table tbody tr:last-child td:first-child{-webkit-border-radius:0 0 0 4px;-moz-border-radius:0 0 0 4px;border-radius:0 0 0 4px;} +.bordered-table tbody tr:last-child td:last-child{-webkit-border-radius:0 0 4px 0;-moz-border-radius:0 0 4px 0;border-radius:0 0 4px 0;} +table .span1{width:20px;} +table .span2{width:60px;} +table .span3{width:100px;} +table .span4{width:140px;} +table .span5{width:180px;} +table .span6{width:220px;} +table .span7{width:260px;} +table .span8{width:300px;} +table .span9{width:340px;} +table .span10{width:380px;} +table .span11{width:420px;} +table .span12{width:460px;} +table .span13{width:500px;} +table .span14{width:540px;} +table .span15{width:580px;} +table .span16{width:620px;} +.zebra-striped tbody tr:nth-child(odd) td,.zebra-striped tbody tr:nth-child(odd) th{background-color:#f9f9f9;} +.zebra-striped tbody tr:hover td,.zebra-striped tbody tr:hover th{background-color:#f5f5f5;} +table .header{cursor:pointer;}table .header:after{content:"";float:right;margin-top:7px;border-width:0 4px 4px;border-style:solid;border-color:#000 transparent;visibility:hidden;} +table .headerSortUp,table .headerSortDown{background-color:rgba(141, 192, 219, 0.25);text-shadow:0 1px 1px rgba(255, 255, 255, 0.75);} +table .header:hover:after{visibility:visible;} 
+table .headerSortDown:after,table .headerSortDown:hover:after{visibility:visible;filter:alpha(opacity=60);-khtml-opacity:0.6;-moz-opacity:0.6;opacity:0.6;} +table .headerSortUp:after{border-bottom:none;border-left:4px solid transparent;border-right:4px solid transparent;border-top:4px solid #000;visibility:visible;-webkit-box-shadow:none;-moz-box-shadow:none;box-shadow:none;filter:alpha(opacity=60);-khtml-opacity:0.6;-moz-opacity:0.6;opacity:0.6;} +table .blue{color:#049cdb;border-bottom-color:#049cdb;} +table .headerSortUp.blue,table .headerSortDown.blue{background-color:#ade6fe;} +table .green{color:#46a546;border-bottom-color:#46a546;} +table .headerSortUp.green,table .headerSortDown.green{background-color:#cdeacd;} +table .red{color:#9d261d;border-bottom-color:#9d261d;} +table .headerSortUp.red,table .headerSortDown.red{background-color:#f4c8c5;} +table .yellow{color:#ffc40d;border-bottom-color:#ffc40d;} +table .headerSortUp.yellow,table .headerSortDown.yellow{background-color:#fff6d9;} +table .orange{color:#f89406;border-bottom-color:#f89406;} +table .headerSortUp.orange,table .headerSortDown.orange{background-color:#fee9cc;} +table .purple{color:#7a43b6;border-bottom-color:#7a43b6;} +table .headerSortUp.purple,table .headerSortDown.purple{background-color:#e2d5f0;} +.topbar{height:40px;position:fixed;top:0;left:0;right:0;z-index:10000;overflow:visible;}.topbar a{color:#bfbfbf;text-shadow:0 -1px 0 rgba(0, 0, 0, 0.25);} +.topbar h3 a:hover,.topbar .brand:hover,.topbar ul .active>a{background-color:#333;background-color:rgba(255, 255, 255, 0.05);color:#ffffff;text-decoration:none;} +.topbar h3{position:relative;} +.topbar h3 a,.topbar .brand{float:left;display:block;padding:8px 20px 12px;margin-left:-20px;color:#ffffff;font-size:20px;font-weight:200;line-height:1;} +.topbar p{margin:0;line-height:40px;}.topbar p a:hover{background-color:transparent;color:#ffffff;} +.topbar form{float:left;margin:5px 0 0 
0;position:relative;filter:alpha(opacity=100);-khtml-opacity:1;-moz-opacity:1;opacity:1;} +.topbar form.pull-right{float:right;} +.topbar input{background-color:#444;background-color:rgba(255, 255, 255, 0.3);font-family:"Helvetica Neue",Helvetica,Arial,sans-serif;font-size:normal;font-weight:13px;line-height:1;padding:4px 9px;color:#ffffff;color:rgba(255, 255, 255, 0.75);border:1px solid #111;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;-webkit-box-shadow:inset 0 1px 2px rgba(0, 0, 0, 0.1),0 1px 0px rgba(255, 255, 255, 0.25);-moz-box-shadow:inset 0 1px 2px rgba(0, 0, 0, 0.1),0 1px 0px rgba(255, 255, 255, 0.25);box-shadow:inset 0 1px 2px rgba(0, 0, 0, 0.1),0 1px 0px rgba(255, 255, 255, 0.25);-webkit-transition:none;-moz-transition:none;-ms-transition:none;-o-transition:none;transition:none;}.topbar input:-moz-placeholder{color:#e6e6e6;} +.topbar input::-webkit-input-placeholder{color:#e6e6e6;} +.topbar input:hover{background-color:#bfbfbf;background-color:rgba(255, 255, 255, 0.5);color:#ffffff;} +.topbar input:focus,.topbar input.focused{outline:0;background-color:#ffffff;color:#404040;text-shadow:0 1px 0 #ffffff;border:0;padding:5px 10px;-webkit-box-shadow:0 0 3px rgba(0, 0, 0, 0.15);-moz-box-shadow:0 0 3px rgba(0, 0, 0, 0.15);box-shadow:0 0 3px rgba(0, 0, 0, 0.15);} +.topbar-inner,.topbar .fill{background-color:#222;background-color:#222222;background-repeat:repeat-x;background-image:-khtml-gradient(linear, left top, left bottom, from(#333333), to(#222222));background-image:-moz-linear-gradient(top, #333333, #222222);background-image:-ms-linear-gradient(top, #333333, #222222);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0%, #333333), color-stop(100%, #222222));background-image:-webkit-linear-gradient(top, #333333, #222222);background-image:-o-linear-gradient(top, #333333, #222222);background-image:linear-gradient(top, #333333, #222222);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#333333', 
endColorstr='#222222', GradientType=0);-webkit-box-shadow:0 1px 3px rgba(0, 0, 0, 0.25),inset 0 -1px 0 rgba(0, 0, 0, 0.1);-moz-box-shadow:0 1px 3px rgba(0, 0, 0, 0.25),inset 0 -1px 0 rgba(0, 0, 0, 0.1);box-shadow:0 1px 3px rgba(0, 0, 0, 0.25),inset 0 -1px 0 rgba(0, 0, 0, 0.1);} +.topbar div>ul,.nav{display:block;float:left;margin:0 10px 0 0;position:relative;left:0;}.topbar div>ul>li,.nav>li{display:block;float:left;} +.topbar div>ul a,.nav a{display:block;float:none;padding:10px 10px 11px;line-height:19px;text-decoration:none;}.topbar div>ul a:hover,.nav a:hover{color:#ffffff;text-decoration:none;} +.topbar div>ul .active>a,.nav .active>a{background-color:#222;background-color:rgba(0, 0, 0, 0.5);} +.topbar div>ul.secondary-nav,.nav.secondary-nav{float:right;margin-left:10px;margin-right:0;}.topbar div>ul.secondary-nav .menu-dropdown,.nav.secondary-nav .menu-dropdown,.topbar div>ul.secondary-nav .dropdown-menu,.nav.secondary-nav .dropdown-menu{right:0;border:0;} +.topbar div>ul a.menu:hover,.nav a.menu:hover,.topbar div>ul li.open .menu,.nav li.open .menu,.topbar div>ul .dropdown-toggle:hover,.nav .dropdown-toggle:hover,.topbar div>ul .dropdown.open .dropdown-toggle,.nav .dropdown.open .dropdown-toggle{background:#444;background:rgba(255, 255, 255, 0.05);} +.topbar div>ul .menu-dropdown,.nav .menu-dropdown,.topbar div>ul .dropdown-menu,.nav .dropdown-menu{background-color:#333;}.topbar div>ul .menu-dropdown a.menu,.nav .menu-dropdown a.menu,.topbar div>ul .dropdown-menu a.menu,.nav .dropdown-menu a.menu,.topbar div>ul .menu-dropdown .dropdown-toggle,.nav .menu-dropdown .dropdown-toggle,.topbar div>ul .dropdown-menu .dropdown-toggle,.nav .dropdown-menu .dropdown-toggle{color:#ffffff;}.topbar div>ul .menu-dropdown a.menu.open,.nav .menu-dropdown a.menu.open,.topbar div>ul .dropdown-menu a.menu.open,.nav .dropdown-menu a.menu.open,.topbar div>ul .menu-dropdown .dropdown-toggle.open,.nav .menu-dropdown .dropdown-toggle.open,.topbar div>ul .dropdown-menu 
.dropdown-toggle.open,.nav .dropdown-menu .dropdown-toggle.open{background:#444;background:rgba(255, 255, 255, 0.05);} +.topbar div>ul .menu-dropdown li a,.nav .menu-dropdown li a,.topbar div>ul .dropdown-menu li a,.nav .dropdown-menu li a{color:#999;text-shadow:0 1px 0 rgba(0, 0, 0, 0.5);}.topbar div>ul .menu-dropdown li a:hover,.nav .menu-dropdown li a:hover,.topbar div>ul .dropdown-menu li a:hover,.nav .dropdown-menu li a:hover{background-color:#191919;background-repeat:repeat-x;background-image:-khtml-gradient(linear, left top, left bottom, from(#292929), to(#191919));background-image:-moz-linear-gradient(top, #292929, #191919);background-image:-ms-linear-gradient(top, #292929, #191919);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0%, #292929), color-stop(100%, #191919));background-image:-webkit-linear-gradient(top, #292929, #191919);background-image:-o-linear-gradient(top, #292929, #191919);background-image:linear-gradient(top, #292929, #191919);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#292929', endColorstr='#191919', GradientType=0);color:#ffffff;} +.topbar div>ul .menu-dropdown .active a,.nav .menu-dropdown .active a,.topbar div>ul .dropdown-menu .active a,.nav .dropdown-menu .active a{color:#ffffff;} +.topbar div>ul .menu-dropdown .divider,.nav .menu-dropdown .divider,.topbar div>ul .dropdown-menu .divider,.nav .dropdown-menu .divider{background-color:#222;border-color:#444;} +.topbar ul .menu-dropdown li a,.topbar ul .dropdown-menu li a{padding:4px 15px;} +li.menu,.dropdown{position:relative;} +a.menu:after,.dropdown-toggle:after{width:0;height:0;display:inline-block;content:"↓";text-indent:-99999px;vertical-align:top;margin-top:8px;margin-left:4px;border-left:4px solid transparent;border-right:4px solid transparent;border-top:4px solid #ffffff;filter:alpha(opacity=50);-khtml-opacity:0.5;-moz-opacity:0.5;opacity:0.5;} 
+.menu-dropdown,.dropdown-menu{background-color:#ffffff;float:left;display:none;position:absolute;top:40px;z-index:900;min-width:160px;max-width:220px;_width:160px;margin-left:0;margin-right:0;padding:6px 0;zoom:1;border-color:#999;border-color:rgba(0, 0, 0, 0.2);border-style:solid;border-width:0 1px 1px;-webkit-border-radius:0 0 6px 6px;-moz-border-radius:0 0 6px 6px;border-radius:0 0 6px 6px;-webkit-box-shadow:0 2px 4px rgba(0, 0, 0, 0.2);-moz-box-shadow:0 2px 4px rgba(0, 0, 0, 0.2);box-shadow:0 2px 4px rgba(0, 0, 0, 0.2);-webkit-background-clip:padding-box;-moz-background-clip:padding-box;background-clip:padding-box;}.menu-dropdown li,.dropdown-menu li{float:none;display:block;background-color:none;} +.menu-dropdown .divider,.dropdown-menu .divider{height:1px;margin:5px 0;overflow:hidden;background-color:#eee;border-bottom:1px solid #ffffff;} +.topbar .dropdown-menu a,.dropdown-menu a{display:block;padding:4px 15px;clear:both;font-weight:normal;line-height:18px;color:#808080;text-shadow:0 1px 0 #ffffff;}.topbar .dropdown-menu a:hover,.dropdown-menu a:hover,.topbar .dropdown-menu a.hover,.dropdown-menu a.hover{background-color:#dddddd;background-repeat:repeat-x;background-image:-khtml-gradient(linear, left top, left bottom, from(#eeeeee), to(#dddddd));background-image:-moz-linear-gradient(top, #eeeeee, #dddddd);background-image:-ms-linear-gradient(top, #eeeeee, #dddddd);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0%, #eeeeee), color-stop(100%, #dddddd));background-image:-webkit-linear-gradient(top, #eeeeee, #dddddd);background-image:-o-linear-gradient(top, #eeeeee, #dddddd);background-image:linear-gradient(top, #eeeeee, #dddddd);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#eeeeee', endColorstr='#dddddd', GradientType=0);color:#404040;text-decoration:none;-webkit-box-shadow:inset 0 1px 0 rgba(0, 0, 0, 0.025),inset 0 -1px rgba(0, 0, 0, 0.025);-moz-box-shadow:inset 0 1px 0 rgba(0, 0, 0, 0.025),inset 0 -1px rgba(0, 
0, 0, 0.025);box-shadow:inset 0 1px 0 rgba(0, 0, 0, 0.025),inset 0 -1px rgba(0, 0, 0, 0.025);} +.open .menu,.dropdown.open .menu,.open .dropdown-toggle,.dropdown.open .dropdown-toggle{color:#ffffff;background:#ccc;background:rgba(0, 0, 0, 0.3);} +.open .menu-dropdown,.dropdown.open .menu-dropdown,.open .dropdown-menu,.dropdown.open .dropdown-menu{display:block;} +.tabs,.pills{margin:0 0 18px;padding:0;list-style:none;zoom:1;}.tabs:before,.pills:before,.tabs:after,.pills:after{display:table;content:"";zoom:1;} +.tabs:after,.pills:after{clear:both;} +.tabs>li,.pills>li{float:left;}.tabs>li>a,.pills>li>a{display:block;} +.tabs{border-color:#ddd;border-style:solid;border-width:0 0 1px;}.tabs>li{position:relative;margin-bottom:-1px;}.tabs>li>a{padding:0 15px;margin-right:2px;line-height:34px;border:1px solid transparent;-webkit-border-radius:4px 4px 0 0;-moz-border-radius:4px 4px 0 0;border-radius:4px 4px 0 0;}.tabs>li>a:hover{text-decoration:none;background-color:#eee;border-color:#eee #eee #ddd;} +.tabs .active>a,.tabs .active>a:hover{color:#808080;background-color:#ffffff;border:1px solid #ddd;border-bottom-color:transparent;cursor:default;} +.tabs .menu-dropdown,.tabs .dropdown-menu{top:35px;border-width:1px;-webkit-border-radius:0 6px 6px 6px;-moz-border-radius:0 6px 6px 6px;border-radius:0 6px 6px 6px;} +.tabs a.menu:after,.tabs .dropdown-toggle:after{border-top-color:#999;margin-top:15px;margin-left:5px;} +.tabs li.open.menu .menu,.tabs .open.dropdown .dropdown-toggle{border-color:#999;} +.tabs li.open a.menu:after,.tabs .dropdown.open .dropdown-toggle:after{border-top-color:#555;} +.pills a{margin:5px 3px 5px 0;padding:0 15px;line-height:30px;text-shadow:0 1px 1px #ffffff;-webkit-border-radius:15px;-moz-border-radius:15px;border-radius:15px;}.pills a:hover{color:#ffffff;text-decoration:none;text-shadow:0 1px 1px rgba(0, 0, 0, 0.25);background-color:#00438a;} +.pills .active a{color:#ffffff;text-shadow:0 1px 1px rgba(0, 0, 0, 0.25);background-color:#0069d6;} 
+.pills-vertical>li{float:none;} +.tab-content>.tab-pane,.pill-content>.pill-pane,.tab-content>div,.pill-content>div{display:none;} +.tab-content>.active,.pill-content>.active{display:block;} +.breadcrumb{padding:7px 14px;margin:0 0 18px;background-color:#f5f5f5;background-repeat:repeat-x;background-image:-khtml-gradient(linear, left top, left bottom, from(#ffffff), to(#f5f5f5));background-image:-moz-linear-gradient(top, #ffffff, #f5f5f5);background-image:-ms-linear-gradient(top, #ffffff, #f5f5f5);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0%, #ffffff), color-stop(100%, #f5f5f5));background-image:-webkit-linear-gradient(top, #ffffff, #f5f5f5);background-image:-o-linear-gradient(top, #ffffff, #f5f5f5);background-image:linear-gradient(top, #ffffff, #f5f5f5);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffffff', endColorstr='#f5f5f5', GradientType=0);border:1px solid #ddd;-webkit-border-radius:3px;-moz-border-radius:3px;border-radius:3px;-webkit-box-shadow:inset 0 1px 0 #ffffff;-moz-box-shadow:inset 0 1px 0 #ffffff;box-shadow:inset 0 1px 0 #ffffff;}.breadcrumb li{display:inline;text-shadow:0 1px 0 #ffffff;} +.breadcrumb .divider{padding:0 5px;color:#bfbfbf;} +.breadcrumb .active a{color:#404040;} +.hero-unit{background-color:#f5f5f5;margin-bottom:30px;padding:60px;-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px;}.hero-unit h1{margin-bottom:0;font-size:60px;line-height:1;letter-spacing:-1px;} +.hero-unit p{font-size:18px;font-weight:200;line-height:27px;} +footer{margin-top:17px;padding-top:17px;border-top:1px solid #eee;} +.page-header{margin-bottom:17px;border-bottom:1px solid #ddd;-webkit-box-shadow:0 1px 0 rgba(255, 255, 255, 0.5);-moz-box-shadow:0 1px 0 rgba(255, 255, 255, 0.5);box-shadow:0 1px 0 rgba(255, 255, 255, 0.5);}.page-header h1{margin-bottom:8px;} 
+.btn.danger,.alert-message.danger,.btn.danger:hover,.alert-message.danger:hover,.btn.error,.alert-message.error,.btn.error:hover,.alert-message.error:hover,.btn.success,.alert-message.success,.btn.success:hover,.alert-message.success:hover,.btn.info,.alert-message.info,.btn.info:hover,.alert-message.info:hover{color:#ffffff;} +.btn .close,.alert-message .close{font-family:Arial,sans-serif;line-height:18px;} +.btn.danger,.alert-message.danger,.btn.error,.alert-message.error{background-color:#c43c35;background-repeat:repeat-x;background-image:-khtml-gradient(linear, left top, left bottom, from(#ee5f5b), to(#c43c35));background-image:-moz-linear-gradient(top, #ee5f5b, #c43c35);background-image:-ms-linear-gradient(top, #ee5f5b, #c43c35);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0%, #ee5f5b), color-stop(100%, #c43c35));background-image:-webkit-linear-gradient(top, #ee5f5b, #c43c35);background-image:-o-linear-gradient(top, #ee5f5b, #c43c35);background-image:linear-gradient(top, #ee5f5b, #c43c35);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ee5f5b', endColorstr='#c43c35', GradientType=0);text-shadow:0 -1px 0 rgba(0, 0, 0, 0.25);border-color:#c43c35 #c43c35 #882a25;border-color:rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25);} +.btn.success,.alert-message.success{background-color:#57a957;background-repeat:repeat-x;background-image:-khtml-gradient(linear, left top, left bottom, from(#62c462), to(#57a957));background-image:-moz-linear-gradient(top, #62c462, #57a957);background-image:-ms-linear-gradient(top, #62c462, #57a957);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0%, #62c462), color-stop(100%, #57a957));background-image:-webkit-linear-gradient(top, #62c462, #57a957);background-image:-o-linear-gradient(top, #62c462, #57a957);background-image:linear-gradient(top, #62c462, #57a957);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#62c462', 
endColorstr='#57a957', GradientType=0);text-shadow:0 -1px 0 rgba(0, 0, 0, 0.25);border-color:#57a957 #57a957 #3d773d;border-color:rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25);} +.btn.info,.alert-message.info{background-color:#339bb9;background-repeat:repeat-x;background-image:-khtml-gradient(linear, left top, left bottom, from(#5bc0de), to(#339bb9));background-image:-moz-linear-gradient(top, #5bc0de, #339bb9);background-image:-ms-linear-gradient(top, #5bc0de, #339bb9);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0%, #5bc0de), color-stop(100%, #339bb9));background-image:-webkit-linear-gradient(top, #5bc0de, #339bb9);background-image:-o-linear-gradient(top, #5bc0de, #339bb9);background-image:linear-gradient(top, #5bc0de, #339bb9);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#5bc0de', endColorstr='#339bb9', GradientType=0);text-shadow:0 -1px 0 rgba(0, 0, 0, 0.25);border-color:#339bb9 #339bb9 #22697d;border-color:rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25);} +.btn{cursor:pointer;display:inline-block;background-color:#e6e6e6;background-repeat:no-repeat;background-image:-webkit-gradient(linear, 0 0, 0 100%, from(#ffffff), color-stop(25%, #ffffff), to(#e6e6e6));background-image:-webkit-linear-gradient(#ffffff, #ffffff 25%, #e6e6e6);background-image:-moz-linear-gradient(top, #ffffff, #ffffff 25%, #e6e6e6);background-image:-ms-linear-gradient(#ffffff, #ffffff 25%, #e6e6e6);background-image:-o-linear-gradient(#ffffff, #ffffff 25%, #e6e6e6);background-image:linear-gradient(#ffffff, #ffffff 25%, #e6e6e6);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffffff', endColorstr='#e6e6e6', GradientType=0);padding:5px 14px 6px;text-shadow:0 1px 1px rgba(255, 255, 255, 0.75);color:#333;font-size:13px;line-height:normal;border:1px solid #ccc;border-bottom-color:#bbb;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;-webkit-box-shadow:inset 0 1px 0 rgba(255, 255, 255, 0.2),0 
1px 2px rgba(0, 0, 0, 0.05);-moz-box-shadow:inset 0 1px 0 rgba(255, 255, 255, 0.2),0 1px 2px rgba(0, 0, 0, 0.05);box-shadow:inset 0 1px 0 rgba(255, 255, 255, 0.2),0 1px 2px rgba(0, 0, 0, 0.05);-webkit-transition:0.1s linear all;-moz-transition:0.1s linear all;-ms-transition:0.1s linear all;-o-transition:0.1s linear all;transition:0.1s linear all;}.btn:hover{background-position:0 -15px;color:#333;text-decoration:none;} +.btn:focus{outline:1px dotted #666;} +.btn.primary{color:#ffffff;background-color:#0064cd;background-repeat:repeat-x;background-image:-khtml-gradient(linear, left top, left bottom, from(#049cdb), to(#0064cd));background-image:-moz-linear-gradient(top, #049cdb, #0064cd);background-image:-ms-linear-gradient(top, #049cdb, #0064cd);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0%, #049cdb), color-stop(100%, #0064cd));background-image:-webkit-linear-gradient(top, #049cdb, #0064cd);background-image:-o-linear-gradient(top, #049cdb, #0064cd);background-image:linear-gradient(top, #049cdb, #0064cd);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#049cdb', endColorstr='#0064cd', GradientType=0);text-shadow:0 -1px 0 rgba(0, 0, 0, 0.25);border-color:#0064cd #0064cd #003f81;border-color:rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25);} +.btn.active,.btn :active{-webkit-box-shadow:inset 0 2px 4px rgba(0, 0, 0, 0.25),0 1px 2px rgba(0, 0, 0, 0.05);-moz-box-shadow:inset 0 2px 4px rgba(0, 0, 0, 0.25),0 1px 2px rgba(0, 0, 0, 0.05);box-shadow:inset 0 2px 4px rgba(0, 0, 0, 0.25),0 1px 2px rgba(0, 0, 0, 0.05);} +.btn.disabled{cursor:default;background-image:none;filter:progid:DXImageTransform.Microsoft.gradient(enabled = false);filter:alpha(opacity=65);-khtml-opacity:0.65;-moz-opacity:0.65;opacity:0.65;-webkit-box-shadow:none;-moz-box-shadow:none;box-shadow:none;} +.btn[disabled]{cursor:default;background-image:none;filter:progid:DXImageTransform.Microsoft.gradient(enabled = 
false);filter:alpha(opacity=65);-khtml-opacity:0.65;-moz-opacity:0.65;opacity:0.65;-webkit-box-shadow:none;-moz-box-shadow:none;box-shadow:none;} +.btn.large{font-size:15px;line-height:normal;padding:9px 14px 9px;-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px;} +.btn.small{padding:7px 9px 7px;font-size:11px;} +:root .alert-message,:root .btn{border-radius:0 \0;} +button.btn::-moz-focus-inner,input[type=submit].btn::-moz-focus-inner{padding:0;border:0;} +.close{float:right;color:#000000;font-size:20px;font-weight:bold;line-height:13.5px;text-shadow:0 1px 0 #ffffff;filter:alpha(opacity=25);-khtml-opacity:0.25;-moz-opacity:0.25;opacity:0.25;}.close:hover{color:#000000;text-decoration:none;filter:alpha(opacity=40);-khtml-opacity:0.4;-moz-opacity:0.4;opacity:0.4;} +.alert-message{position:relative;padding:7px 15px;margin-bottom:18px;color:#404040;background-color:#eedc94;background-repeat:repeat-x;background-image:-khtml-gradient(linear, left top, left bottom, from(#fceec1), to(#eedc94));background-image:-moz-linear-gradient(top, #fceec1, #eedc94);background-image:-ms-linear-gradient(top, #fceec1, #eedc94);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0%, #fceec1), color-stop(100%, #eedc94));background-image:-webkit-linear-gradient(top, #fceec1, #eedc94);background-image:-o-linear-gradient(top, #fceec1, #eedc94);background-image:linear-gradient(top, #fceec1, #eedc94);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#fceec1', endColorstr='#eedc94', GradientType=0);text-shadow:0 -1px 0 rgba(0, 0, 0, 0.25);border-color:#eedc94 #eedc94 #e4c652;border-color:rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25);text-shadow:0 1px 0 rgba(255, 255, 255, 0.5);border-width:1px;border-style:solid;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;-webkit-box-shadow:inset 0 1px 0 rgba(255, 255, 255, 0.25);-moz-box-shadow:inset 0 1px 0 rgba(255, 255, 255, 0.25);box-shadow:inset 0 1px 0 rgba(255, 
255, 255, 0.25);}.alert-message .close{margin-top:1px;*margin-top:0;} +.alert-message a{font-weight:bold;color:#404040;} +.alert-message.danger p a,.alert-message.error p a,.alert-message.success p a,.alert-message.info p a{color:#ffffff;} +.alert-message h5{line-height:18px;} +.alert-message p{margin-bottom:0;} +.alert-message div{margin-top:5px;margin-bottom:2px;line-height:28px;} +.alert-message .btn{-webkit-box-shadow:0 1px 0 rgba(255, 255, 255, 0.25);-moz-box-shadow:0 1px 0 rgba(255, 255, 255, 0.25);box-shadow:0 1px 0 rgba(255, 255, 255, 0.25);} +.alert-message.block-message{background-image:none;background-color:#fdf5d9;filter:progid:DXImageTransform.Microsoft.gradient(enabled = false);padding:14px;border-color:#fceec1;-webkit-box-shadow:none;-moz-box-shadow:none;box-shadow:none;}.alert-message.block-message ul,.alert-message.block-message p{margin-right:30px;} +.alert-message.block-message ul{margin-bottom:0;} +.alert-message.block-message li{color:#404040;} +.alert-message.block-message .alert-actions{margin-top:5px;} +.alert-message.block-message.error,.alert-message.block-message.success,.alert-message.block-message.info{color:#404040;text-shadow:0 1px 0 rgba(255, 255, 255, 0.5);} +.alert-message.block-message.error{background-color:#fddfde;border-color:#fbc7c6;} +.alert-message.block-message.success{background-color:#d1eed1;border-color:#bfe7bf;} +.alert-message.block-message.info{background-color:#ddf4fb;border-color:#c6edf9;} +.alert-message.block-message.danger p a,.alert-message.block-message.error p a,.alert-message.block-message.success p a,.alert-message.block-message.info p a{color:#404040;} +.pagination{height:36px;margin:18px 0;}.pagination ul{float:left;margin:0;border:1px solid #ddd;border:1px solid rgba(0, 0, 0, 0.15);-webkit-border-radius:3px;-moz-border-radius:3px;border-radius:3px;-webkit-box-shadow:0 1px 2px rgba(0, 0, 0, 0.05);-moz-box-shadow:0 1px 2px rgba(0, 0, 0, 0.05);box-shadow:0 1px 2px rgba(0, 0, 0, 0.05);} +.pagination 
li{display:inline;} +.pagination a{float:left;padding:0 14px;line-height:34px;border-right:1px solid;border-right-color:#ddd;border-right-color:rgba(0, 0, 0, 0.15);*border-right-color:#ddd;text-decoration:none;} +.pagination a:hover,.pagination .active a{background-color:#c7eefe;} +.pagination .disabled a,.pagination .disabled a:hover{background-color:transparent;color:#bfbfbf;} +.pagination .next a{border:0;} +.well{background-color:#f5f5f5;margin-bottom:20px;padding:19px;min-height:20px;border:1px solid #eee;border:1px solid rgba(0, 0, 0, 0.05);-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;-webkit-box-shadow:inset 0 1px 1px rgba(0, 0, 0, 0.05);-moz-box-shadow:inset 0 1px 1px rgba(0, 0, 0, 0.05);box-shadow:inset 0 1px 1px rgba(0, 0, 0, 0.05);}.well blockquote{border-color:#ddd;border-color:rgba(0, 0, 0, 0.15);} +.modal-backdrop{background-color:#000000;position:fixed;top:0;left:0;right:0;bottom:0;z-index:10000;}.modal-backdrop.fade{opacity:0;} +.modal-backdrop,.modal-backdrop.fade.in{filter:alpha(opacity=80);-khtml-opacity:0.8;-moz-opacity:0.8;opacity:0.8;} +.modal{position:fixed;top:50%;left:50%;z-index:11000;width:560px;margin:-250px 0 0 -280px;background-color:#ffffff;border:1px solid #999;border:1px solid rgba(0, 0, 0, 0.3);*border:1px solid #999;-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px;-webkit-box-shadow:0 3px 7px rgba(0, 0, 0, 0.3);-moz-box-shadow:0 3px 7px rgba(0, 0, 0, 0.3);box-shadow:0 3px 7px rgba(0, 0, 0, 0.3);-webkit-background-clip:padding-box;-moz-background-clip:padding-box;background-clip:padding-box;}.modal .close{margin-top:7px;} +.modal.fade{-webkit-transition:opacity .3s linear, top .3s ease-out;-moz-transition:opacity .3s linear, top .3s ease-out;-ms-transition:opacity .3s linear, top .3s ease-out;-o-transition:opacity .3s linear, top .3s ease-out;transition:opacity .3s linear, top .3s ease-out;top:-25%;} +.modal.fade.in{top:50%;} +.modal-header{border-bottom:1px solid #eee;padding:5px 15px;} 
+.modal-body{padding:15px;} +.modal-body form{margin-bottom:0;} +.modal-footer{background-color:#f5f5f5;padding:14px 15px 15px;border-top:1px solid #ddd;-webkit-border-radius:0 0 6px 6px;-moz-border-radius:0 0 6px 6px;border-radius:0 0 6px 6px;-webkit-box-shadow:inset 0 1px 0 #ffffff;-moz-box-shadow:inset 0 1px 0 #ffffff;box-shadow:inset 0 1px 0 #ffffff;zoom:1;margin-bottom:0;}.modal-footer:before,.modal-footer:after{display:table;content:"";zoom:1;} +.modal-footer:after{clear:both;} +.modal-footer .btn{float:right;margin-left:5px;} +.modal .popover,.modal .twipsy{z-index:12000;} +.twipsy{display:block;position:absolute;visibility:visible;padding:5px;font-size:11px;z-index:12000;filter:alpha(opacity=80);-khtml-opacity:0.8;-moz-opacity:0.8;opacity:0.8;}.twipsy.fade.in{filter:alpha(opacity=80);-khtml-opacity:0.8;-moz-opacity:0.8;opacity:0.8;} +.twipsy.above .twipsy-arrow{bottom:0;left:50%;margin-left:-5px;border-left:5px solid transparent;border-right:5px solid transparent;border-top:5px solid #000000;} +.twipsy.left .twipsy-arrow{top:50%;right:0;margin-top:-5px;border-top:5px solid transparent;border-bottom:5px solid transparent;border-left:5px solid #000000;} +.twipsy.below .twipsy-arrow{top:0;left:50%;margin-left:-5px;border-left:5px solid transparent;border-right:5px solid transparent;border-bottom:5px solid #000000;} +.twipsy.right .twipsy-arrow{top:50%;left:0;margin-top:-5px;border-top:5px solid transparent;border-bottom:5px solid transparent;border-right:5px solid #000000;} +.twipsy-inner{padding:3px 8px;background-color:#000000;color:white;text-align:center;max-width:200px;text-decoration:none;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;} +.twipsy-arrow{position:absolute;width:0;height:0;} +.popover{position:absolute;top:0;left:0;z-index:12000;padding:5px;display:none;}.popover.above .arrow{bottom:0;left:50%;margin-left:-5px;border-left:5px solid transparent;border-right:5px solid transparent;border-top:5px solid #000000;} 
+.popover.right .arrow{top:50%;left:0;margin-top:-5px;border-top:5px solid transparent;border-bottom:5px solid transparent;border-right:5px solid #000000;} +.popover.below .arrow{top:0;left:50%;margin-left:-5px;border-left:5px solid transparent;border-right:5px solid transparent;border-bottom:5px solid #000000;} +.popover.left .arrow{top:50%;right:0;margin-top:-5px;border-top:5px solid transparent;border-bottom:5px solid transparent;border-left:5px solid #000000;} +.popover .arrow{position:absolute;width:0;height:0;} +.popover .inner{background:#000000;background:rgba(0, 0, 0, 0.8);padding:3px;overflow:hidden;width:280px;-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px;-webkit-box-shadow:0 3px 7px rgba(0, 0, 0, 0.3);-moz-box-shadow:0 3px 7px rgba(0, 0, 0, 0.3);box-shadow:0 3px 7px rgba(0, 0, 0, 0.3);} +.popover .title{background-color:#f5f5f5;padding:9px 15px;line-height:1;-webkit-border-radius:3px 3px 0 0;-moz-border-radius:3px 3px 0 0;border-radius:3px 3px 0 0;border-bottom:1px solid #eee;} +.popover .content{background-color:#ffffff;padding:14px;-webkit-border-radius:0 0 3px 3px;-moz-border-radius:0 0 3px 3px;border-radius:0 0 3px 3px;-webkit-background-clip:padding-box;-moz-background-clip:padding-box;background-clip:padding-box;}.popover .content p,.popover .content ul,.popover .content ol{margin-bottom:0;} +.fade{-webkit-transition:opacity 0.15s linear;-moz-transition:opacity 0.15s linear;-ms-transition:opacity 0.15s linear;-o-transition:opacity 0.15s linear;transition:opacity 0.15s linear;opacity:0;}.fade.in{opacity:1;} +.label{padding:1px 3px 2px;font-size:9.75px;font-weight:bold;color:#ffffff;text-transform:uppercase;white-space:nowrap;background-color:#bfbfbf;-webkit-border-radius:3px;-moz-border-radius:3px;border-radius:3px;}.label.important{background-color:#c43c35;} +.label.warning{background-color:#f89406;} +.label.success{background-color:#46a546;} +.label.notice{background-color:#62cffc;} 
+.media-grid{margin-left:-20px;margin-bottom:0;zoom:1;}.media-grid:before,.media-grid:after{display:table;content:"";zoom:1;} +.media-grid:after{clear:both;} +.media-grid li{display:inline;} +.media-grid a{float:left;padding:4px;margin:0 0 18px 20px;border:1px solid #ddd;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;-webkit-box-shadow:0 1px 1px rgba(0, 0, 0, 0.075);-moz-box-shadow:0 1px 1px rgba(0, 0, 0, 0.075);box-shadow:0 1px 1px rgba(0, 0, 0, 0.075);}.media-grid a img{display:block;} +.media-grid a:hover{border-color:#0069d6;-webkit-box-shadow:0 1px 4px rgba(0, 105, 214, 0.25);-moz-box-shadow:0 1px 4px rgba(0, 105, 214, 0.25);box-shadow:0 1px 4px rgba(0, 105, 214, 0.25);} diff --git a/src/dashboard/src/media/vendor/twitter-bootstrap/docs/assets/css/docs.css b/src/dashboard/src/media/vendor/twitter-bootstrap/docs/assets/css/docs.css new file mode 100644 index 0000000000..fb727a900a --- /dev/null +++ b/src/dashboard/src/media/vendor/twitter-bootstrap/docs/assets/css/docs.css @@ -0,0 +1,317 @@ +/* Add additional stylesheets below +-------------------------------------------------- */ +/* + Bootstrap's documentation styles + Special styles for presenting Bootstrap's documentation and examples +*/ + +/* Body and structure +-------------------------------------------------- */ +body { + background-color: #fff; + position: relative; +} +section { + padding-top: 60px; +} +section > .row { + margin-bottom: 10px; +} + + +/* Jumbotrons +-------------------------------------------------- */ +.jumbotron { + min-width: 940px; + padding-top: 40px; +} +.jumbotron .inner { + background: transparent url(../img/grid-18px.png) top center; + padding: 45px 0; + -webkit-box-shadow: inset 0 10px 30px rgba(0,0,0,.3); + -moz-box-shadow: inset 0 10px 30px rgba(0,0,0,.3); +/* box-shadow: inset 0 10px 30px rgba(0,0,0,.3); +*/} +.jumbotron h1, +.jumbotron p { + margin-bottom: 9px; + color: #fff; + text-align: center; + text-shadow: 0 1px 1px rgba(0,0,0,.3); +} 
+.jumbotron h1 { + font-size: 54px; + line-height: 1; + text-shadow: 0 1px 2px rgba(0,0,0,.5); +} +.jumbotron p { + font-weight: 300; +} +.jumbotron .lead { + font-size: 20px; + line-height: 27px; +} +.jumbotron p a { + color: #fff; + font-weight: bold; +} + +/* Specific jumbotrons +------------------------- */ +/* main docs page */ +.masthead { + background-color: #049cd9; + background-repeat: no-repeat; + background-image: -webkit-gradient(linear, left top, left bottom, from(#004D9F), to(#049cd9)); + background-image: -webkit-linear-gradient(#004D9F, #049cd9); + background-image: -moz-linear-gradient(#004D9F, #049cd9); + background-image: -o-linear-gradient(top, #004D9F, #049cd9); + background-image: -khtml-gradient(linear, left top, left bottom, from(#004D9F), to(#049cd9)); + filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#004D9F', endColorstr='#049cd9', GradientType=0); /* IE8 and down */ +} +/* supporting docs pages */ +.subhead { + background-color: #767d80; + background-repeat: no-repeat; + background-image: -webkit-gradient(linear, left top, left bottom, from(#565d60), to(#767d80)); + background-image: -webkit-linear-gradient(#565d60, #767d80); + background-image: -moz-linear-gradient(#565d60, #767d80); + background-image: -o-linear-gradient(top, #565d60, #767d80); + background-image: -khtml-gradient(linear, left top, left bottom, from(#565d60), to(#767d80)); + filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#565d60', endColorstr='#767d80', GradientType=0); /* IE8 and down */ +} +.subhead .inner { + padding: 36px 0 27px; +} +.subhead h1, +.subhead p { + text-align: left; +} +.subhead h1 { + font-size: 40px; +} +.subhead p a { + font-weight: normal; +} + + +/* Footer +-------------------------------------------------- */ +.footer { + background-color: #eee; + min-width: 940px; + padding: 30px 0; + text-shadow: 0 1px 0 #fff; + border-top: 1px solid #e5e5e5; + -webkit-box-shadow: inset 0 5px 15px rgba(0,0,0,.025); + 
-moz-box-shadow: inset 0 5px 15px rgba(0,0,0,.025); +/* box-shadow: inset 0 5px 15px rgba(0,0,0,.025); +*/} +.footer p { + color: #555; +} + + +/* Quickstart section for getting le code +-------------------------------------------------- */ +.quickstart { + background-color: #f5f5f5; + background-repeat: repeat-x; + background-image: -khtml-gradient(linear, left top, left bottom, from(#f9f9f9), to(#f5f5f5)); + background-image: -moz-linear-gradient(#f9f9f9, #f5f5f5); + background-image: -ms-linear-gradient(#f9f9f9, #f5f5f5); + background-image: -webkit-gradient(linear, left top, left bottom, color-stop(0%, #f9f9f9), color-stop(100%, #f5f5f5)); + background-image: -webkit-linear-gradient(#f9f9f9, #f5f5f5); + background-image: -o-linear-gradient(#f9f9f9, #f5f5f5); + -ms-filter: "progid:DXImageTransform.Microsoft.gradient(startColorstr='#f9f9f9', endColorstr='#f5f5f5', GradientType=0)"; + filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#f9f9f9', endColorstr='#f5f5f5', GradientType=0); + background-image: linear-gradient(#f9f9f9, #f5f5f5); + border-top: 1px solid #fff; + border-bottom: 1px solid #eee; +} +.quickstart .container { + margin-bottom: 0; +} +.quickstart .row { + margin: 0 -20px; + -webkit-box-shadow: 1px 0 0 #f9f9f9; + -moz-box-shadow: 1px 0 0 #f9f9f9; + box-shadow: 1px 0 0 #f9f9f9; +} +.quickstart [class*="span"] { + width: 285px; + height: 117px; + margin-left: 0; + padding: 17px 20px 26px; + border-left: 1px solid #eee; + -webkit-box-shadow: inset 1px 0 0 #f9f9f9; + -moz-box-shadow: inset 1px 0 0 #f9f9f9; + box-shadow: inset 1px 0 0 #f9f9f9; +} +.quickstart [class*="span"]:last-child { + border-right: 1px solid #eee; + width: 286px; +} +.quickstart h6, +.quickstart p { + line-height: 18px; + text-align: center; + margin-bottom: 9px; + color: #333; +} +.quickstart .current-version, +.quickstart .current-version a { + color: #999; +} +.quickstart h6 { + color: #999; +} +.quickstart textarea { + display: block; + width: 275px; + height: 
auto; + margin: 0 0 9px; + line-height: 21px; + white-space: nowrap; + overflow: hidden; +} + + +/* Special grid styles +-------------------------------------------------- */ +.show-grid { + margin-top: 10px; + margin-bottom: 10px; +} +.show-grid [class*="span"] { + background: #eee; + text-align: center; + -webkit-border-radius: 3px; + -moz-border-radius: 3px; + border-radius: 3px; + min-height: 30px; + line-height: 30px; +} +.show-grid:hover [class*="span"] { + background: #ddd; +} +.show-grid .show-grid { + margin-top: 0; + margin-bottom: 0; +} +.show-grid .show-grid [class*="span"] { + background-color: #ccc; +} + + +/* Render mini layout previews +-------------------------------------------------- */ +.mini-layout { + border: 1px solid #ddd; + -webkit-border-radius: 6px; + -moz-border-radius: 6px; + border-radius: 6px; + -webkit-box-shadow: 0 1px 2px rgba(0,0,0,.075); + -moz-box-shadow: 0 1px 2px rgba(0,0,0,.075); + box-shadow: 0 1px 2px rgba(0,0,0,.075); +} +.mini-layout { + height: 240px; + margin-bottom: 20px; + padding: 9px; +} +.mini-layout div { + -webkit-border-radius: 3px; + -moz-border-radius: 3px; + border-radius: 3px; +} +.mini-layout .mini-layout-body { + background-color: #dceaf4; + margin: 0 auto; + width: 240px; + height: 240px; +} +.mini-layout.fluid .mini-layout-sidebar, +.mini-layout.fluid .mini-layout-header, +.mini-layout.fluid .mini-layout-body { + float: left; +} +.mini-layout.fluid .mini-layout-sidebar { + background-color: #bbd8e9; + width: 90px; + height: 240px; +} +.mini-layout.fluid .mini-layout-body { + width: 300px; + margin-left: 10px; +} + + +/* Topbar special styles +-------------------------------------------------- */ +.topbar-wrapper { + position: relative; + height: 40px; + margin: 5px 0 15px; +} +.topbar-wrapper .topbar { + position: absolute; + margin: 0 -20px; +} +.topbar-wrapper .topbar .topbar-inner { + padding-left: 20px; + padding-right: 20px; + -webkit-border-radius: 4px; + -moz-border-radius: 4px; + border-radius: 
4px; +} + +/* Topbar in js docs +------------------------- */ +#bootstrap-js .topbar-wrapper { + z-index: 1; +} +#bootstrap-js .topbar-wrapper .topbar { + position: absolute; + margin: 0 -20px; +} +#bootstrap-js .topbar-wrapper .topbar .topbar-inner { + padding-left: 20px; + padding-right: 20px; + -webkit-border-radius: 4px; + -moz-border-radius: 4px; + border-radius: 4px; +} +#bootstrap-js .topbar-wrapper .container { + width: auto; +} + + +/* Popover docs +-------------------------------------------------- */ +.popover-well { + min-height: 160px; +} +.popover-well .popover { + display: block; +} +.popover-well .popover-wrapper { + width: 50%; + height: 160px; + float: left; + margin-left: 55px; + position: relative; +} +.popover-well .popover-menu-wrapper { + height: 80px; +} +img.large-bird { + margin: 5px 0 0 310px; + opacity: .1; +} + +/* Pretty Print +-------------------------------------------------- */ +pre.prettyprint { + overflow: hidden; +} \ No newline at end of file diff --git a/src/dashboard/src/media/vendor/twitter-bootstrap/docs/assets/ico/bootstrap-apple-114x114.png b/src/dashboard/src/media/vendor/twitter-bootstrap/docs/assets/ico/bootstrap-apple-114x114.png new file mode 100644 index 0000000000..c434d9713d Binary files /dev/null and b/src/dashboard/src/media/vendor/twitter-bootstrap/docs/assets/ico/bootstrap-apple-114x114.png differ diff --git a/src/dashboard/src/media/vendor/twitter-bootstrap/docs/assets/ico/bootstrap-apple-57x57.png b/src/dashboard/src/media/vendor/twitter-bootstrap/docs/assets/ico/bootstrap-apple-57x57.png new file mode 100644 index 0000000000..13c2f7f37a Binary files /dev/null and b/src/dashboard/src/media/vendor/twitter-bootstrap/docs/assets/ico/bootstrap-apple-57x57.png differ diff --git a/src/dashboard/src/media/vendor/twitter-bootstrap/docs/assets/ico/bootstrap-apple-72x72.png b/src/dashboard/src/media/vendor/twitter-bootstrap/docs/assets/ico/bootstrap-apple-72x72.png new file mode 100644 index 0000000000..c4bfb622cf 
Binary files /dev/null and b/src/dashboard/src/media/vendor/twitter-bootstrap/docs/assets/ico/bootstrap-apple-72x72.png differ diff --git a/src/dashboard/src/media/vendor/twitter-bootstrap/docs/assets/ico/favicon.ico b/src/dashboard/src/media/vendor/twitter-bootstrap/docs/assets/ico/favicon.ico new file mode 100644 index 0000000000..3455c3fb99 Binary files /dev/null and b/src/dashboard/src/media/vendor/twitter-bootstrap/docs/assets/ico/favicon.ico differ diff --git a/src/dashboard/src/media/vendor/twitter-bootstrap/docs/assets/img/bird.png b/src/dashboard/src/media/vendor/twitter-bootstrap/docs/assets/img/bird.png new file mode 100644 index 0000000000..f0e6fcb51b Binary files /dev/null and b/src/dashboard/src/media/vendor/twitter-bootstrap/docs/assets/img/bird.png differ diff --git a/src/dashboard/src/media/vendor/twitter-bootstrap/docs/assets/img/browsers.png b/src/dashboard/src/media/vendor/twitter-bootstrap/docs/assets/img/browsers.png new file mode 100644 index 0000000000..369a7f838f Binary files /dev/null and b/src/dashboard/src/media/vendor/twitter-bootstrap/docs/assets/img/browsers.png differ diff --git a/src/dashboard/src/media/vendor/twitter-bootstrap/docs/assets/img/example-diagram-01.png b/src/dashboard/src/media/vendor/twitter-bootstrap/docs/assets/img/example-diagram-01.png new file mode 100644 index 0000000000..0cece3b3ea Binary files /dev/null and b/src/dashboard/src/media/vendor/twitter-bootstrap/docs/assets/img/example-diagram-01.png differ diff --git a/src/dashboard/src/media/vendor/twitter-bootstrap/docs/assets/img/example-diagram-02.png b/src/dashboard/src/media/vendor/twitter-bootstrap/docs/assets/img/example-diagram-02.png new file mode 100644 index 0000000000..557edd3112 Binary files /dev/null and b/src/dashboard/src/media/vendor/twitter-bootstrap/docs/assets/img/example-diagram-02.png differ diff --git a/src/dashboard/src/media/vendor/twitter-bootstrap/docs/assets/img/example-diagram-03.png 
b/src/dashboard/src/media/vendor/twitter-bootstrap/docs/assets/img/example-diagram-03.png new file mode 100644 index 0000000000..acf819d408 Binary files /dev/null and b/src/dashboard/src/media/vendor/twitter-bootstrap/docs/assets/img/example-diagram-03.png differ diff --git a/src/dashboard/src/media/vendor/twitter-bootstrap/docs/assets/img/grid-18px.png b/src/dashboard/src/media/vendor/twitter-bootstrap/docs/assets/img/grid-18px.png new file mode 100644 index 0000000000..68f9fe1b70 Binary files /dev/null and b/src/dashboard/src/media/vendor/twitter-bootstrap/docs/assets/img/grid-18px.png differ diff --git a/src/dashboard/src/media/vendor/twitter-bootstrap/docs/assets/img/twitter-logo-no-bird.png b/src/dashboard/src/media/vendor/twitter-bootstrap/docs/assets/img/twitter-logo-no-bird.png new file mode 100644 index 0000000000..70b6573d7e Binary files /dev/null and b/src/dashboard/src/media/vendor/twitter-bootstrap/docs/assets/img/twitter-logo-no-bird.png differ diff --git a/src/dashboard/src/media/vendor/twitter-bootstrap/docs/assets/js/application.js b/src/dashboard/src/media/vendor/twitter-bootstrap/docs/assets/js/application.js new file mode 100644 index 0000000000..5beba46e1f --- /dev/null +++ b/src/dashboard/src/media/vendor/twitter-bootstrap/docs/assets/js/application.js @@ -0,0 +1,52 @@ +$(document).ready(function(){ + + // table sort example + // ================== + + $("#sortTableExample").tablesorter( { sortList: [[ 1, 0 ]] } ) + + + // add on logic + // ============ + + $('.add-on :checkbox').click(function () { + if ($(this).attr('checked')) { + $(this).parents('.add-on').addClass('active') + } else { + $(this).parents('.add-on').removeClass('active') + } + }) + + + // Disable certain links in docs + // ============================= + // Please do not carry these styles over to your projects, it's merely here to prevent button clicks form taking you away from your spot on page + + $('ul.tabs a, ul.pills a, .pagination a, .well .btn, .actions .btn, 
.alert-message .btn, a.close').click(function (e) { + e.preventDefault() + }) + + // Copy code blocks in docs + $(".copy-code").focus(function () { + var el = this; + // push select to event loop for chrome :{o + setTimeout(function () { $(el).select(); }, 0); + }); + + + // POSITION STATIC TWIPSIES + // ======================== + + $(window).bind( 'load resize', function () { + $(".twipsies a").each(function () { + $(this) + .twipsy({ + live: false + , placement: $(this).attr('title') + , trigger: 'manual' + , offset: 2 + }) + .twipsy('show') + }) + }) +}); diff --git a/src/dashboard/src/media/vendor/twitter-bootstrap/docs/assets/js/google-code-prettify/prettify.css b/src/dashboard/src/media/vendor/twitter-bootstrap/docs/assets/js/google-code-prettify/prettify.css new file mode 100644 index 0000000000..f9fd622a1c --- /dev/null +++ b/src/dashboard/src/media/vendor/twitter-bootstrap/docs/assets/js/google-code-prettify/prettify.css @@ -0,0 +1,94 @@ +.com { color: #93a1a1; } +.lit { color: #195f91; } +.pun, .opn, .clo { color: #93a1a1; } +.fun { color: #dc322f; } +.str, .atv { color: #268bd2; } +.kwd, .tag { color: #195f91; } +.typ, .atn, .dec, .var { color: #CB4B16; } +.pln { color: #93a1a1; } +.prettyprint { + background-color: #fefbf3; + padding: 9px; + border: 1px solid rgba(0,0,0,.2); + -webkit-box-shadow: 0 1px 2px rgba(0,0,0,.1); + -moz-box-shadow: 0 1px 2px rgba(0,0,0,.1); + box-shadow: 0 1px 2px rgba(0,0,0,.1); +} + +/* Specify class=linenums on a pre to get line numbering */ +ol.linenums { + margin: 0 0 0 40px; +} +/* IE indents via margin-left */ +ol.linenums li { + padding: 0 5px; + color: rgba(0,0,0,.15); + line-height: 20px; + -webkit-border-radius: 2px; + -moz-border-radius: 2px; + border-radius: 2px; +} +/* Alternate shading for lines */ +li.L1, li.L3, li.L5, li.L7, li.L9 { } + +/* +$base03: #002b36; +$base02: #073642; +$base01: #586e75; +$base00: #657b83; +$base0: #839496; +$base1: #93a1a1; +$base2: #eee8d5; +$base3: #fdf6e3; +$yellow: #b58900; 
+$orange: #cb4b16; +$red: #dc322f; +$magenta: #d33682; +$violet: #6c71c4; +$blue: #268bd2; +$cyan: #2aa198; +$green: #859900; +*/ + + +/* +#1d1f21 Background +#282a2e Current Line +#373b41 Selection +#c5c8c6 Foreground +#969896 Comment +#cc6666 Red +#de935f Orange +#f0c674 Yellow +#b5bd68 Green +#8abeb7 Aqua +#81a2be Blue +#b294bb Purple +*/ + + +/* DARK THEME */ +/* ---------- */ + +.prettyprint-dark { + background-color: #1d1f21; + border: 0; + padding: 10px; +} +.prettyprint-dark .linenums li { + color: #444; +} +.prettyprint-dark .linenums li:hover { + background-color: #282a2e; +} +/* tags in html */ +.prettyprint-dark .kwd, +.prettyprint-dark .tag { color: #cc6666; } +/* html attr */ +.prettyprint-dark .typ, +.prettyprint-dark .atn, +.prettyprint-dark .dec, +.prettyprint-dark .var { color: #de935f; } +/* html attr values */ +.prettyprint-dark .str, +.prettyprint-dark .atv { color: #b5bd68; } diff --git a/src/dashboard/src/media/vendor/twitter-bootstrap/docs/assets/js/google-code-prettify/prettify.js b/src/dashboard/src/media/vendor/twitter-bootstrap/docs/assets/js/google-code-prettify/prettify.js new file mode 100644 index 0000000000..eef5ad7e6a --- /dev/null +++ b/src/dashboard/src/media/vendor/twitter-bootstrap/docs/assets/js/google-code-prettify/prettify.js @@ -0,0 +1,28 @@ +var q=null;window.PR_SHOULD_USE_CONTINUATION=!0; +(function(){function L(a){function m(a){var f=a.charCodeAt(0);if(f!==92)return f;var b=a.charAt(1);return(f=r[b])?f:"0"<=b&&b<="7"?parseInt(a.substring(1),8):b==="u"||b==="x"?parseInt(a.substring(2),16):a.charCodeAt(1)}function e(a){if(a<32)return(a<16?"\\x0":"\\x")+a.toString(16);a=String.fromCharCode(a);if(a==="\\"||a==="-"||a==="["||a==="]")a="\\"+a;return a}function h(a){for(var f=a.substring(1,a.length-1).match(/\\u[\dA-Fa-f]{4}|\\x[\dA-Fa-f]{2}|\\[0-3][0-7]{0,2}|\\[0-7]{1,2}|\\[\S\s]|[^\\]/g),a= 
+[],b=[],o=f[0]==="^",c=o?1:0,i=f.length;c122||(d<65||j>90||b.push([Math.max(65,j)|32,Math.min(d,90)|32]),d<97||j>122||b.push([Math.max(97,j)&-33,Math.min(d,122)&-33]))}}b.sort(function(a,f){return a[0]-f[0]||f[1]-a[1]});f=[];j=[NaN,NaN];for(c=0;ci[0]&&(i[1]+1>i[0]&&b.push("-"),b.push(e(i[1])));b.push("]");return b.join("")}function y(a){for(var f=a.source.match(/\[(?:[^\\\]]|\\[\S\s])*]|\\u[\dA-Fa-f]{4}|\\x[\dA-Fa-f]{2}|\\\d+|\\[^\dux]|\(\?[!:=]|[()^]|[^()[\\^]+/g),b=f.length,d=[],c=0,i=0;c=2&&a==="["?f[c]=h(j):a!=="\\"&&(f[c]=j.replace(/[A-Za-z]/g,function(a){a=a.charCodeAt(0);return"["+String.fromCharCode(a&-33,a|32)+"]"}));return f.join("")}for(var t=0,s=!1,l=!1,p=0,d=a.length;p=5&&"lang-"===b.substring(0,5))&&!(o&&typeof o[1]==="string"))c=!1,b="src";c||(r[f]=b)}i=d;d+=f.length;if(c){c=o[1];var j=f.indexOf(c),k=j+c.length;o[2]&&(k=f.length-o[2].length,j=k-c.length);b=b.substring(5);B(l+i,f.substring(0,j),e,p);B(l+i+j,c,C(b,c),p);B(l+i+k,f.substring(k),e,p)}else p.push(l+i,b)}a.e=p}var h={},y;(function(){for(var e=a.concat(m), +l=[],p={},d=0,g=e.length;d=0;)h[n.charAt(k)]=r;r=r[1];n=""+r;p.hasOwnProperty(n)||(l.push(r),p[n]=q)}l.push(/[\S\s]/);y=L(l)})();var t=m.length;return e}function u(a){var m=[],e=[];a.tripleQuotedStrings?m.push(["str",/^(?:'''(?:[^'\\]|\\[\S\s]|''?(?=[^']))*(?:'''|$)|"""(?:[^"\\]|\\[\S\s]|""?(?=[^"]))*(?:"""|$)|'(?:[^'\\]|\\[\S\s])*(?:'|$)|"(?:[^"\\]|\\[\S\s])*(?:"|$))/,q,"'\""]):a.multiLineStrings?m.push(["str",/^(?:'(?:[^'\\]|\\[\S\s])*(?:'|$)|"(?:[^"\\]|\\[\S\s])*(?:"|$)|`(?:[^\\`]|\\[\S\s])*(?:`|$))/, +q,"'\"`"]):m.push(["str",/^(?:'(?:[^\n\r'\\]|\\.)*(?:'|$)|"(?:[^\n\r"\\]|\\.)*(?:"|$))/,q,"\"'"]);a.verbatimStrings&&e.push(["str",/^@"(?:[^"]|"")*(?:"|$)/,q]);var 
h=a.hashComments;h&&(a.cStyleComments?(h>1?m.push(["com",/^#(?:##(?:[^#]|#(?!##))*(?:###|$)|.*)/,q,"#"]):m.push(["com",/^#(?:(?:define|elif|else|endif|error|ifdef|include|ifndef|line|pragma|undef|warning)\b|[^\n\r]*)/,q,"#"]),e.push(["str",/^<(?:(?:(?:\.\.\/)*|\/?)(?:[\w-]+(?:\/[\w-]+)+)?[\w-]+\.h|[a-z]\w*)>/,q])):m.push(["com",/^#[^\n\r]*/, +q,"#"]));a.cStyleComments&&(e.push(["com",/^\/\/[^\n\r]*/,q]),e.push(["com",/^\/\*[\S\s]*?(?:\*\/|$)/,q]));a.regexLiterals&&e.push(["lang-regex",/^(?:^^\.?|[!+-]|!=|!==|#|%|%=|&|&&|&&=|&=|\(|\*|\*=|\+=|,|-=|->|\/|\/=|:|::|;|<|<<|<<=|<=|=|==|===|>|>=|>>|>>=|>>>|>>>=|[?@[^]|\^=|\^\^|\^\^=|{|\||\|=|\|\||\|\|=|~|break|case|continue|delete|do|else|finally|instanceof|return|throw|try|typeof)\s*(\/(?=[^*/])(?:[^/[\\]|\\[\S\s]|\[(?:[^\\\]]|\\[\S\s])*(?:]|$))+\/)/]);(h=a.types)&&e.push(["typ",h]);a=(""+a.keywords).replace(/^ | $/g, +"");a.length&&e.push(["kwd",RegExp("^(?:"+a.replace(/[\s,]+/g,"|")+")\\b"),q]);m.push(["pln",/^\s+/,q," \r\n\t\xa0"]);e.push(["lit",/^@[$_a-z][\w$@]*/i,q],["typ",/^(?:[@_]?[A-Z]+[a-z][\w$@]*|\w+_t\b)/,q],["pln",/^[$_a-z][\w$@]*/i,q],["lit",/^(?:0x[\da-f]+|(?:\d(?:_\d+)*\d*(?:\.\d*)?|\.\d\+)(?:e[+-]?\d+)?)[a-z]*/i,q,"0123456789"],["pln",/^\\[\S\s]?/,q],["pun",/^.[^\s\w"-$'./@\\`]*/,q]);return x(m,e)}function D(a,m){function e(a){switch(a.nodeType){case 1:if(k.test(a.className))break;if("BR"===a.nodeName)h(a), +a.parentNode&&a.parentNode.removeChild(a);else for(a=a.firstChild;a;a=a.nextSibling)e(a);break;case 3:case 4:if(p){var b=a.nodeValue,d=b.match(t);if(d){var c=b.substring(0,d.index);a.nodeValue=c;(b=b.substring(d.index+d[0].length))&&a.parentNode.insertBefore(s.createTextNode(b),a.nextSibling);h(a);c||a.parentNode.removeChild(a)}}}}function h(a){function b(a,d){var e=d?a.cloneNode(!1):a,f=a.parentNode;if(f){var f=b(f,1),g=a.nextSibling;f.appendChild(e);for(var h=g;h;h=g)g=h.nextSibling,f.appendChild(h)}return e} +for(;!a.nextSibling;)if(a=a.parentNode,!a)return;for(var 
a=b(a.nextSibling,0),e;(e=a.parentNode)&&e.nodeType===1;)a=e;d.push(a)}var k=/(?:^|\s)nocode(?:\s|$)/,t=/\r\n?|\n/,s=a.ownerDocument,l;a.currentStyle?l=a.currentStyle.whiteSpace:window.getComputedStyle&&(l=s.defaultView.getComputedStyle(a,q).getPropertyValue("white-space"));var p=l&&"pre"===l.substring(0,3);for(l=s.createElement("LI");a.firstChild;)l.appendChild(a.firstChild);for(var d=[l],g=0;g=0;){var h=m[e];A.hasOwnProperty(h)?window.console&&console.warn("cannot override language handler %s",h):A[h]=a}}function C(a,m){if(!a||!A.hasOwnProperty(a))a=/^\s*=o&&(h+=2);e>=c&&(a+=2)}}catch(w){"console"in window&&console.log(w&&w.stack?w.stack:w)}}var v=["break,continue,do,else,for,if,return,while"],w=[[v,"auto,case,char,const,default,double,enum,extern,float,goto,int,long,register,short,signed,sizeof,static,struct,switch,typedef,union,unsigned,void,volatile"], +"catch,class,delete,false,import,new,operator,private,protected,public,this,throw,true,try,typeof"],F=[w,"alignof,align_union,asm,axiom,bool,concept,concept_map,const_cast,constexpr,decltype,dynamic_cast,explicit,export,friend,inline,late_check,mutable,namespace,nullptr,reinterpret_cast,static_assert,static_cast,template,typeid,typename,using,virtual,where"],G=[w,"abstract,boolean,byte,extends,final,finally,implements,import,instanceof,null,native,package,strictfp,super,synchronized,throws,transient"], +H=[G,"as,base,by,checked,decimal,delegate,descending,dynamic,event,fixed,foreach,from,group,implicit,in,interface,internal,into,is,lock,object,out,override,orderby,params,partial,readonly,ref,sbyte,sealed,stackalloc,string,select,uint,ulong,unchecked,unsafe,ushort,var"],w=[w,"debugger,eval,export,function,get,null,set,undefined,var,with,Infinity,NaN"],I=[v,"and,as,assert,class,def,del,elif,except,exec,finally,from,global,import,in,is,lambda,nonlocal,not,or,pass,print,raise,try,with,yield,False,True,None"], 
+J=[v,"alias,and,begin,case,class,def,defined,elsif,end,ensure,false,in,module,next,nil,not,or,redo,rescue,retry,self,super,then,true,undef,unless,until,when,yield,BEGIN,END"],v=[v,"case,done,elif,esac,eval,fi,function,in,local,set,then,until"],K=/^(DIR|FILE|vector|(de|priority_)?queue|list|stack|(const_)?iterator|(multi)?(set|map)|bitset|u?(int|float)\d*)/,N=/\S/,O=u({keywords:[F,H,w,"caller,delete,die,do,dump,elsif,eval,exit,foreach,for,goto,if,import,last,local,my,next,no,our,print,package,redo,require,sub,undef,unless,until,use,wantarray,while,BEGIN,END"+ +I,J,v],hashComments:!0,cStyleComments:!0,multiLineStrings:!0,regexLiterals:!0}),A={};k(O,["default-code"]);k(x([],[["pln",/^[^]*(?:>|$)/],["com",/^<\!--[\S\s]*?(?:--\>|$)/],["lang-",/^<\?([\S\s]+?)(?:\?>|$)/],["lang-",/^<%([\S\s]+?)(?:%>|$)/],["pun",/^(?:<[%?]|[%?]>)/],["lang-",/^]*>([\S\s]+?)<\/xmp\b[^>]*>/i],["lang-js",/^]*>([\S\s]*?)(<\/script\b[^>]*>)/i],["lang-css",/^]*>([\S\s]*?)(<\/style\b[^>]*>)/i],["lang-in.tag",/^(<\/?[a-z][^<>]*>)/i]]), +["default-markup","htm","html","mxml","xhtml","xml","xsl"]);k(x([["pln",/^\s+/,q," \t\r\n"],["atv",/^(?:"[^"]*"?|'[^']*'?)/,q,"\"'"]],[["tag",/^^<\/?[a-z](?:[\w-.:]*\w)?|\/?>$/i],["atn",/^(?!style[\s=]|on)[a-z](?:[\w:-]*\w)?/i],["lang-uq.val",/^=\s*([^\s"'>]*(?:[^\s"'/>]|\/(?=\s)))/],["pun",/^[/<->]+/],["lang-js",/^on\w+\s*=\s*"([^"]+)"/i],["lang-js",/^on\w+\s*=\s*'([^']+)'/i],["lang-js",/^on\w+\s*=\s*([^\s"'>]+)/i],["lang-css",/^style\s*=\s*"([^"]+)"/i],["lang-css",/^style\s*=\s*'([^']+)'/i],["lang-css", 
+/^style\s*=\s*([^\s"'>]+)/i]]),["in.tag"]);k(x([],[["atv",/^[\S\s]+/]]),["uq.val"]);k(u({keywords:F,hashComments:!0,cStyleComments:!0,types:K}),["c","cc","cpp","cxx","cyc","m"]);k(u({keywords:"null,true,false"}),["json"]);k(u({keywords:H,hashComments:!0,cStyleComments:!0,verbatimStrings:!0,types:K}),["cs"]);k(u({keywords:G,cStyleComments:!0}),["java"]);k(u({keywords:v,hashComments:!0,multiLineStrings:!0}),["bsh","csh","sh"]);k(u({keywords:I,hashComments:!0,multiLineStrings:!0,tripleQuotedStrings:!0}), +["cv","py"]);k(u({keywords:"caller,delete,die,do,dump,elsif,eval,exit,foreach,for,goto,if,import,last,local,my,next,no,our,print,package,redo,require,sub,undef,unless,until,use,wantarray,while,BEGIN,END",hashComments:!0,multiLineStrings:!0,regexLiterals:!0}),["perl","pl","pm"]);k(u({keywords:J,hashComments:!0,multiLineStrings:!0,regexLiterals:!0}),["rb"]);k(u({keywords:w,cStyleComments:!0,regexLiterals:!0}),["js"]);k(u({keywords:"all,and,by,catch,class,else,extends,false,finally,for,if,in,is,isnt,loop,new,no,not,null,of,off,on,or,return,super,then,true,try,unless,until,when,while,yes", +hashComments:3,cStyleComments:!0,multilineStrings:!0,tripleQuotedStrings:!0,regexLiterals:!0}),["coffee"]);k(x([],[["str",/^[\S\s]+/]]),["regex"]);window.prettyPrintOne=function(a,m,e){var h=document.createElement("PRE");h.innerHTML=a;e&&D(h,e);E({g:m,i:e,h:h});return h.innerHTML};window.prettyPrint=function(a){function m(){for(var e=window.PR_SHOULD_USE_CONTINUATION?l.now()+250:Infinity;p=0){var k=k.match(g),f,b;if(b= +!k){b=n;for(var o=void 0,c=b.firstChild;c;c=c.nextSibling)var i=c.nodeType,o=i===1?o?b:c:i===3?N.test(c.nodeValue)?b:o:o;b=(f=o===b?void 
0:o)&&"CODE"===f.tagName}b&&(k=f.className.match(g));k&&(k=k[1]);b=!1;for(o=n.parentNode;o;o=o.parentNode)if((o.tagName==="pre"||o.tagName==="code"||o.tagName==="xmp")&&o.className&&o.className.indexOf("prettyprint")>=0){b=!0;break}b||((b=(b=n.className.match(/\blinenums\b(?::(\d+))?/))?b[1]&&b[1].length?+b[1]:!0:!1)&&D(n,b),d={g:k,h:n,i:b},E(d))}}p + + + + Bootstrap, from Twitter + + + + + + + + + + + + + + + + + + + + + + + + +
    +
    +
    +

    Bootstrap, from Twitter

    +

    + Bootstrap is a toolkit from Twitter designed to kickstart development of webapps and sites.
    + It includes base CSS and HTML for typography, forms, buttons, tables, grids, navigation, and more.
    +

    +

    Nerd alert: Bootstrap is built with Less and was designed to work out of the gate with modern browsers in mind.

    +
    +
    +
    + + +
    +
    +
    +
    +
    Hotlink the CSS
    +

    For the quickest and easiest start, just copy this snippet into your webpage.

    +
    + + +
    +
    +
    Use it with Less
    +

    A fan of using Less? No problem, just clone the repo and add these lines:

    +
    + + +
    +
    +
    Fork on GitHub
    +

    Download, fork, pull, file issues, and more with the official Bootstrap repo on Github.

    +

    Bootstrap on GitHub »

    +

    Currently v1.4.0

    +
    +
    +
    +
    + +
    + + + + +
    + +
    +
    +

    History

    +

    Engineers at Twitter have historically used almost any library they were familiar with to meet front-end requirements. Bootstrap began as an answer to the challenges that presented. With the help of many awesome folks, Bootstrap has grown significantly.

    +

    Read more on dev.twitter.com ›

    +

    + + +

    +
    +
    +

    Browser support

    +

    Bootstrap is tested and supported in major modern browsers like Chrome, Safari, Internet Explorer, and Firefox.

    + Tested and supported in Chrome, Safari, Internet Explorer, and Firefox +
      +
    • Latest Safari
    • +
    • Latest Google Chrome
    • +
    • Firefox 4+
    • +
    • Internet Explorer 7+
    • +
    • Opera 11
    • +
    +
    +
    +

    What's included

    +

    Bootstrap comes complete with compiled CSS, uncompiled, and example templates.

    + +
    +
    + +
    +
    +

    Quick-start examples

    +

    Need some quick templates? Check out these basic examples we've put together:

    +
      +
    • + Simple three-column layout with hero unit +
    • +
    • + Fluid layout with static sidebar +
    • +
    • + Simple hanging container for apps +
    • +
    +
    +
    +
    + + + + +
    + +
    +
    +

    Default grid

    +

    The default grid system provided as part of Bootstrap is a 940px wide 16-column grid. It’s a flavor of the popular 960 grid system, but without the additional margin/padding on the left and right sides.

    +
    +
    +

    Example grid markup

    +

    As shown here, a basic layout can be created with two "columns," each spanning a number of the 16 foundational columns we defined as part of our grid system. See the examples below for more variations.

    +
    +<div class="row">
    +  <div class="span6">
    +    ...
    +  </div>
    +  <div class="span10">
    +    ...
    +  </div>
    +</div>
    +
    +
    +
    +
    +
    1
    +
    1
    +
    1
    +
    1
    +
    1
    +
    1
    +
    1
    +
    1
    +
    1
    +
    1
    +
    1
    +
    1
    +
    1
    +
    1
    +
    1
    +
    1
    +
    +
    +
    2
    +
    2
    +
    2
    +
    2
    +
    2
    +
    2
    +
    2
    +
    2
    +
    +
    +
    3
    +
    3
    +
    3
    +
    3
    +
    3
    +
    1
    +
    +
    +
    4
    +
    4
    +
    4
    +
    4
    +
    +
    +
    1/3
    +
    1/3
    +
    1/3
    +
    +
    +
    1/3
    +
    2/3
    +
    +
    +
    4
    +
    6
    +
    6
    +
    +
    +
    8
    +
    8
    +
    +
    +
    5
    +
    11
    +
    +
    +
    16
    +
    + +
    + +

    Offsetting columns

    +
    +
    4
    +
    8 offset 4
    +
    +
    +
    1/3 offset 2/3s
    +
    +
    +
    4 offset 4
    +
    4 offset 4
    +
    +
    +
    5 offset 3
    +
    5 offset 3
    +
    +
    +
    10 offset 6
    +
    + +
    + +
    +
    +

    Nesting columns

    +

    Nest your content if you must by creating a .row within an existing column.

    +
    +
    +

    Example of nested columns

    +
    +
    + Level 1 of column +
    +
    + Level 2 +
    +
    + Level 2 +
    +
    +
    +
    +
    +<div class="row">
    +  <div class="span12">
    +    Level 1 of column
    +    <div class="row">
    +      <div class="span6">
    +        Level 2
    +      </div>
    +      <div class="span6">
    +        Level 2
    +      </div>
    +    </div>
    +  </div>
    +</div>
    +
    +
    +
    + +
    + +
    +
    +

    Roll your own grid

    +

    Built into Bootstrap are a handful of variables for customizing the default 940px grid system. With a bit of customization, you can modify the size of columns, their gutters, and the container they reside in.

    +
    +
    +

    Inside the grid

    +

    The variables needed to modify the grid system currently all reside in variables.less.

    +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    VariableDefault valueDescription
    @gridColumns16The number of columns within the grid
    @gridColumnWidth40pxThe width of each column within the grid
    @gridGutterWidth20pxThe negative space between each column
    @siteWidthComputed sum of all columns and guttersWe use some basic match to count the number of columns and gutters and set the width of the .fixed-container() mixin.
    +

    Now to customize

    +

    Modifying the grid means changing the three @grid-* variables and recompiling the Less files.

    +

    Bootstrap comes equipped to handle a grid system with up to 24 columns; the default is just 16. Here's how your grid variables would look customized to a 24-column grid.

    +
    @gridColumns:       24;
    +@gridColumnWidth:   20px;
    +@gridGutterWidth:   20px;
    +

    Once recompiled, you'll be set!

    +
    +
    + + + + + +
    + + +
    +
    +

    Fixed layout

    +

    The default and simple 940px-wide, centered layout for just about any website or page provided by a single <div.container>.

    +
    +
    +
    +
    +<body>
    +  <div class="container">
    +    ...
    +  </div>
    +</body>
    +
    +
    +
    +

    Fluid layout

    +

    An alternative, flexible fluid page structure with min- and max-widths and a left-hand sidebar. Great for apps and docs.

    +
    +
    +
    +
    +
    +<body>
    +  <div class="container-fluid">
    +    <div class="sidebar">
    +      ...
    +    </div>
    +    <div class="content">
    +      ...
    +    </div>
    +  </div>
    +</body>
    +
    +
    +
    +
    + + + + +
    + + + +
    +
    +

    Headings & copy

    +

    A standard typographic hierarchy for structuring your webpages.

    +

    The entire typographic grid is based on two Less variables in our variables.less file: @basefont and @baseline. The first is the base font-size used throughout and the second is the base line-height.

    +

    We use those variables, and some math, to create the margins, paddings, and line-heights of all our type and more.

    +
    +
    +

    h1. Heading 1

    +

    h2. Heading 2

    +

    h3. Heading 3

    +

    h4. Heading 4

    +
    h5. Heading 5
    +
    h6. Heading 6
    +
    +
    +

    Example paragraph

    +

    Nullam quis risus eget urna mollis ornare vel eu leo. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Nullam id dolor id nibh ultricies vehicula ut id elit.

    +

    Example heading Has sub-heading…

    +
    +
    + + +
    +
    +

    Misc. elements

    +

    Using emphasis, addresses, & abbreviations

    +

    + <strong> + <em> + <address> + <abbr> +

    +
    +
    +

    When to use

    +

    Emphasis tags (<strong> and <em>) should be used to indicate additional importance or emphasis of a word or phrase relative to its surrounding copy. Use <strong> for importance and <em> for stress emphasis.

    +

    Emphasis in a paragraph

    +

    Fusce dapibus, tellus ac cursus commodo, tortor mauris condimentum nibh, ut fermentum massa justo sit amet risus. Maecenas faucibus mollis interdum. Nulla vitae elit libero, a pharetra augue.

    +

    Note: It's still okay to use <b> and <i> tags in HTML5 and they don't have to be styled bold and italic, respectively (although if there is a more semantic element, use it). <b> is meant to highlight words or phrases without conveying additional importance, while <i> is mostly for voice, technical terms, etc.

    +

    Addresses

    +

    The <address> element is used for contact information for its nearest ancestor, or the entire body of work. Here are two examples of how it could be used:

    + +
    +
    +
    + Twitter, Inc.
    + 795 Folsom Ave, Suite 600
    + San Francisco, CA 94107
    + P: (123) 456-7890 +
    +
    +
    +
    + Full Name
    + first.last@gmail.com +
    +
    +
    + +

    Note: Each line in an <address> must end with a line-break (<br />) or be wrapped in a block-level tag (e.g., <p>) to properly structure the content.

    +

    Abbreviations

    +

    For abbreviations and acronyms, use the <abbr> tag (<acronym> is deprecated in HTML5). Put the shorthand form within the tag and set a title for the complete name.

    +
    +
    + + +
    +
    +

    Blockquotes

    +

    + <blockquote> + <p> + <small> +

    +
    +
    +

    How to quote

    +

    To include a blockquote, wrap <blockquote> around <p> and <small> tags. Use the <small> element to cite your source and you'll get an em dash &mdash; before it.

    +
    +

    Lorem ipsum dolor sit amet, consectetur adipiscing elit. Integer posuere erat a ante venenatis dapibus posuere velit aliquet.

    + Dr. Julius Hibbert +
    +
    +<blockquote>
    +  <p>Lorem ipsum dolor sit amet, consectetur adipiscing elit. Integer posuere erat a ante venenatis dapibus posuere velit aliquet.</p>
    +  <small>Dr. Julius Hibbert</small>
    +</blockquote>
    +
    +
    +
    + +

    Lists

    +
    +
    +

    Unordered <ul>

    +
      +
    • Lorem ipsum dolor sit amet
    • +
    • Consectetur adipiscing elit
    • +
    • Integer molestie lorem at massa
    • +
    • Facilisis in pretium nisl aliquet
    • +
    • Nulla volutpat aliquam velit +
        +
      • Phasellus iaculis neque
      • +
      • Purus sodales ultricies
      • +
      • Vestibulum laoreet porttitor sem
      • +
      • Ac tristique libero volutpat at
      • +
      +
    • +
    • Faucibus porta lacus fringilla vel
    • +
    • Aenean sit amet erat nunc
    • +
    • Eget porttitor lorem
    • +
    +
    +
    +

    Unstyled <ul.unstyled>

    +
      +
    • Lorem ipsum dolor sit amet
    • +
    • Consectetur adipiscing elit
    • +
    • Integer molestie lorem at massa
    • +
    • Facilisis in pretium nisl aliquet
    • +
    • Nulla volutpat aliquam velit +
        +
      • Phasellus iaculis neque
      • +
      • Purus sodales ultricies
      • +
      • Vestibulum laoreet porttitor sem
      • +
      • Ac tristique libero volutpat at
      • +
      +
    • +
    • Faucibus porta lacus fringilla vel
    • +
    • Aenean sit amet erat nunc
    • +
    • Eget porttitor lorem
    • +
    +
    +
    +

    Ordered <ol>

    +
      +
    1. Lorem ipsum dolor sit amet
    2. +
    3. Consectetur adipiscing elit
    4. +
    5. Integer molestie lorem at massa
    6. +
    7. Facilisis in pretium nisl aliquet
    8. +
    9. Nulla volutpat aliquam velit
    10. +
    11. Faucibus porta lacus fringilla vel
    12. +
    13. Aenean sit amet erat nunc
    14. +
    15. Eget porttitor lorem
    16. +
    +
    +
    +

    Description dl

    +
    +
    Description lists
    +
    A description list is perfect for defining terms.
    +
    Euismod
    +
    Vestibulum id ligula porta felis euismod semper eget lacinia odio sem nec elit.
    +
    Donec id elit non mi porta gravida at eget metus.
    +
    Malesuada porta
    +
    Etiam porta sem malesuada magna mollis euismod.
    +
    +
    +
    + + + +
    +
    +

    Code

    +

    + <code> + <pre> +

    +

    Pimp your code in style with two simple tags. For even more awesomeness through javascript, drop in Google's code prettify library and you're set.

    +
    +
    +

    Presenting code

    +

    Code, blocks of or just snippets inline, can be displayed with style just by wrapping in the right tag. For blocks of code spanning multiple lines, use the <pre> element. For inline code, use the <code> element.

    + + + + + + + + + + + + + + + + + + + + + +
    ElementResult
    <code>In a line of text like this, your wrapped code will look like this <html> element.
    <pre> +
    <div>
    +  <h1>Heading</h1>
    +  <p>Something right here...</p>
    +</div>
    +

    Note: Be sure to keep code within <pre> tags as close to the left as possible; it will render all tabs.

    +
    <pre class="prettyprint"> +

    Using the google-code-prettify library, your blocks of code get a slightly different visual style and automatic syntax highlighting.

    +
    <div>
    +  <h1>Heading</h1>
    +  <p>Something right here...</p>
    +</div>
    +

    Download google-code-prettify and view the readme for how to use.

    +
    +
    +
    + + +
    +
    +

    Inline labels

    +

    + <span class="label"> +

    +

    Call attention to or flag any phrase in your body text.

    +
    +
    +

    Label anything

    +

    Ever needed one of those fancy New! or Important flags when writing code? Well, now you have them. Here's what's included by default:

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    LabelResult
    + <span class="label">Default</span> + + Default +
    + <span class="label success">New</span> + + New +
    + <span class="label warning">Warning</span> + + Warning +
    + <span class="label important">Important</span> + + Important +
    + <span class="label notice">Notice</span> + + Notice +
    +
    +
    + +
    + + + + +
    + + +
    +
    +

    Media grid

    +

    Display thumbnails of varying sizes on pages with a low HTML footprint and minimal styles.

    +
    +
    +

    Example thumbnails

    +

    Thumbnails in the .media-grid can be any size, but they work best when mapped directly to the built-in Bootstrap grid system. Image widths like 90, 210, and 330 combine with a few pixels of padding to equal the .span2, .span4, and .span6 column sizes.

    +

    Large

    + +

    Medium

    + +

    Small

    + +

    Coding them

    +

    Media grids are easy to use and rather simple on the markup side. Their dimensions are purely based on the size of the images included.

    +
    +<ul class="media-grid">
    +  <li>
    +    <a href="#">
    +      <img class="thumbnail" src="http://placehold.it/330x230" alt="">
    +    </a>
    +  </li>
    +  <li>
    +    <a href="#">
    +      <img class="thumbnail" src="http://placehold.it/330x230" alt="">
    +    </a>
    +  </li>
    +</ul>
    +
    +
    +
    +
    + + + + +
    + + +
    +
    +

    Building tables

    +

    + <table> + <thead> + <tbody> + <tr> + <th> + <td> + <colspan> + <caption> +

    +

    Tables are great—for a lot of things. Great tables, however, need a bit of markup love to be useful, scalable, and readable (at the code level). Here are a few tips to help.

    +

    Always wrap your column headers in a <thead> such that hierarchy is <thead> > <tr> > <th>.

    +

    Similar to the column headers, all your table’s body content should be wrapped in a <tbody> so your hierarchy is <tbody> > <tr> > <td>.

    +
    +
    +

    Example: Default table styles

    +

    All tables will be automatically styled with only the essential borders to ensure readability and maintain structure. No need to add extra classes or attributes.

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    #First NameLast NameLanguage
    1SomeOneEnglish
    2JoeSixpackEnglish
    3StuDentCode
    +
    +<table>
    +  ...
    +</table>
    +

    Example: Condensed table

    +

    For tables that require more data in tighter spaces, use the condensed flavor that cuts padding in half. It can also be used in conjunction with borders and zebra-stripes, just like the default table styles.

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    #First NameLast NameLanguage
    1SomeOneEnglish
    2JoeSixpackEnglish
    3StuDentCode
    + +

    Example: Bordered table

    +

    Make your tables look just a wee bit sleeker by rounding their corners and adding borders on all sides.

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    #First NameLast NameLanguage
    1SomeOneEnglish
    2JoeSixpackEnglish
    3StuDentCode
    +
    +<table class="bordered-table">
    +  ...
    +</table>
    +

    Example: Zebra-striped

    +

    Get a little fancy with your tables by adding zebra-striping—just add the .zebra-striped class.

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    #First NameLast NameLanguage
    1SomeOneEnglish
    2JoeSixpackEnglish
    3StuDentCode
    + span 4 columns +
    + span 2 columns + + span 2 columns +
    +

    Note: Zebra-striping is a progressive enhancement not available for older browsers like IE8 and below.

    +
    +<table class="zebra-striped">
    +  ...
    +</table>
    +

    Example: Zebra-striped w/ TableSorter.js

    +

    Taking the previous example, we improve the usefulness of our tables by providing sorting functionality via jQuery and the Tablesorter plugin. Click any column’s header to change the sort.

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    #First NameLast NameLanguage
    1YourOneEnglish
    2JoeSixpackEnglish
    3StuDentCode
    +
    +<script src="js/jquery/jquery.tablesorter.min.js"></script>
    +<script >
    +  $(function() {
    +    $("table#sortTableExample").tablesorter({ sortList: [[1,0]] });
    +  });
    +</script>
    +<table class="zebra-striped">
    +  ...
    +</table>
    +
    +
    +
    + + + + +
    + +
    +
    +

    Default styles

    +

    All forms are given default styles to present them in a readable and scalable way. Styles are provided for text inputs, select lists, textareas, radio buttons and checkboxes, and buttons.

    +
    +
    +
    +
    + Example form legend +
    + +
    + +
    +
    +
    + +
    + +
    +
    +
    + +
    + +
    +
    +
    + +
    + +
    +
    +
    + +
    + Some value here +
    +
    +
    + +
    + +
    +
    +
    + +
    + +
    +
    +
    + +
    + + Small snippet of help text +
    +
    +
    + +
    + + Success! +
    +
    +
    + +
    + + Ruh roh! +
    +
    +
    +
    + Example form legend +
    + +
    +
    + @ + +
    + Here's some help text +
    +
    +
    + +
    +
    + + +
    +
    +
    +
    + +
    +
    + + +
    +
    +
    +
    + +
    + +
    +
    +
    +
    + Example form legend +
    + +
    +
      +
    • + +
    • +
    • + +
    • +
    • + +
    • +
    • + +
    • +
    + + Note: Labels surround all the options for much larger click areas and a more usable form. + +
    +
    +
    + +
    +
    + + + to + + + All times are shown as Pacific Standard Time (GMT -08:00). +
    +
    +
    +
    + +
    + + + Block of help text to describe the field above if need be. + +
    +
    +
    + +
    +
      +
    • + +
    • +
    • + +
    • +
    +
    +
    +
    +   +
    +
    +
    +
    +
    + +
    + +
    +
    +

    Stacked forms

    +

    Add .form-stacked to your form’s HTML and you’ll have labels on top of their fields instead of to their left. This works great if your forms are short or you have two columns of inputs for heavier forms.

    +
    +
    +
    +
    + Example form legend +
    + +
    + +
    +
    +
    + +
    + +
    +
    +
    +
    + Example form legend +
    + +
    + + Small snippet of help text +
    +
    +
    + +
    +
      +
    • + +
    • +
    • + +
    • +
    + + Note: Labels surround all the options for much larger click areas and a more usable form. + +
    +
    +
    +
    +   +
    +
    +
    +
    + +
    +
    +

    Form field sizes

    +

    Customize any form input, select, or textarea width by adding just a few classes to your markup.

    +

    As of v1.3.0, we have added the grid-based sizing classes for form elements. Please use the these over the existing .mini, .small, etc classes.

    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    + +
    +
    +

    Buttons

    +

    As a convention, buttons are used for actions while links are used for objects. For instance, "Download" could be a button and "recent activity" could be a link.

    +

    All buttons default to a light gray style, but a number of functional classes can be applied for different color styles. These classes include a blue .primary class, a light-blue .info class, a green .success class, and a red .danger class.

    +
    +
    +

    Example buttons

    +

    Button styles can be applied to anything with the .btn applied. Typically you’ll want to apply these to only <a>, <button>, and select <input> elements. Here’s how it looks:

    +
    +      +
    +

    Alternate sizes

    +

    Fancy larger or smaller buttons? Have at it!

    + + +

    Disabled state

    +

    For buttons that are not active or are disabled by the app for one reason or another, use the disabled state. That’s .disabled for links and :disabled for <button> elements.

    +

    Links

    + +

    Buttons

    +
    +   +
    +
    +
    +
    + + + + + + + + + +
    + + +
    +
    +

    Basic alerts

    +

    .alert-message

    +

    One-line messages for highlighting the failure, possible failure, or success of an action. Particularly useful for forms.

    +

    Get the javascript »

    +
    +
    +
    + × +

    Holy guacamole! Best check yo self, you’re not looking too good.

    +
    +
    + × +

    Oh snap! Change this and that and try again.

    +
    +
    + × +

    Well done! You successfully read this alert message.

    +
    +
    + × +

    Heads up! This is an alert that needs your attention, but it’s not a huge priority just yet.

    +
    + +

    Example code

    +
    +<div class="alert-message warning">
    +  <a class="close" href="#">×</a>
    +  <p><strong>Holy guacamole!</strong> Best check yo self, you’re not looking too good.</p>
    +</div>
    +
    +
    +
    + +
    +
    +

    Block messages

    +

    .alert-message.block-message

    +

    For messages that require a bit of explanation, we have paragraph style alerts. These are perfect for bubbling up longer error messages, warning a user of a pending action, or just presenting information for more emphasis on the page.

    +

    Get the javascript »

    +
    +
    +
    + × +

    Holy guacamole! This is a warning! Best check yo self, you’re not looking too good. Nulla vitae elit libero, a pharetra augue. Praesent commodo cursus magna, vel scelerisque nisl consectetur et.

    + +
    +
    + × +

    Oh snap! You got an error! Change this and that and try again.

    +
      +
    • Duis mollis est non commodo luctus
    • +
    • Nisi erat porttitor ligula
    • +
    • Eget lacinia odio sem nec elit
    • +
    + +
    +
    + × +

    Well done! You successfully read this alert message. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Maecenas faucibus mollis interdum.

    + +
    +
    + × +

    Heads up! This is an alert that needs your attention, but it’s not a huge priority just yet.

    + +
    + +

    Example code

    +
    +<div class="alert-message block-message warning">
    +  <a class="close" href="#">×</a>
    +  <p><strong>Holy guacamole! This is a warning!</strong> Best check yo self, you’re not looking too good. Nulla vitae elit libero, a pharetra augue. Praesent commodo cursus magna, vel scelerisque nisl consectetur et.</p>
    +  <div class="alert-actions">
    +    <a class="btn small" href="#">Take this action</a> <a class="btn small" href="#">Or do this</a>
    +  </div>
    +</div>
    +
    +
    +
    +
    + + + +
    + +
    +
    +

    Modals

    +

    Modals—dialogs or lightboxes—are great for contextual actions in situations where it’s important that the background context be maintained.

    +

    Get the javascript »

    +
    +
    +
    + + +
    +
    +
    + + +
    +
    +

    Tooltips

    +

    Twipsies are super useful for aiding a confused user and pointing them in the right direction.

    +

    Get the javascript »

    +
    +
    +
    +
    +

    +Lorem ipsum dolar sit amet illo error ipsum veritatis aut iste perspiciatis iste voluptas natus illo quasi odit aut natus consequuntur consequuntur, aut natus illo voluptatem odit perspiciatis laudantium rem doloremque totam voluptas. Voluptasdicta eaque beatae aperiam ut enim voluptatem explicabo explicabo, voluptas quia odit fugit accusantium totam totam architecto explicabo sit quasi fugit fugit, totam doloremque unde sunt sed dicta quae accusantium fugit voluptas nemo voluptas voluptatem rem quae aut veritatis quasi quae. +

    +
    +
    +
    +
    + + +
    +
    +

    Popovers

    +

    Use popovers to provide subtextual information to a page without affecting layout.

    +

    Get the javascript »

    +
    +
    +
    +
    +
    +
    +
    +

    Popover Title

    +
    +

    Etiam porta sem malesuada magna mollis euismod. Maecenas faucibus mollis interdum. Morbi leo risus, porta ac consectetur ac, vestibulum at eros.

    +
    +
    +
    + +
    +
    +
    +
    +
    + + + + +
    + +
    +
    +

    Getting started

    +

    Integrating javascript with the Bootstrap library is super easy. Below we go over the basics and provide you with some awesome plugins to get you started!

    +

    View javascript docs »

    +
    +
    +

    What's included

    +

    Bring some of Bootstrap's primary components to life with new custom plugins that work with jQuery and Ender. We encourage you to extend and modify them to fit your specific development needs.

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    FileDescription
    bootstrap-modal.jsOur Modal plugin is a super slim take on the traditional modal js plugin! We took special care to include only the bare functionality that we require at twitter.
    bootstrap-alerts.jsThe alert plugin is a super tiny class for adding close functionality to alerts.
    bootstrap-dropdown.jsThis plugin is for adding dropdown interaction to the bootstrap topbar or tabbed navigations.
    bootstrap-scrollspy.jsThe ScrollSpy plugin is for adding an auto updating nav based on scroll position to the bootstrap topbar.
    bootstrap-buttons.jsThis plugin offers additional functionality for managing button state.
    bootstrap-tabs.jsThis plugin adds quick, dynamic tab and pill functionality for cycling through local content.
    bootstrap-twipsy.jsBased on the excellent jQuery.tipsy plugin written by Jason Frame; twipsy is an updated version, which doesn't rely on images, uses css3 for animations, and data-attributes for local title storage!
    bootstrap-popover.jsThe popover plugin provides a simple interface for adding popovers to your application. It extends the boostrap-twipsy.js plugin, so be sure to grab that file as well when including popovers in your project!
    +

    Is javascript necessary?

    +

    Nope! Bootstrap is designed first and foremost to be a CSS library. This javascript provides a basic interactive layer on top of the included styles.

    +

    However, for those who do need javascript, we've provided the plugins above to help you understand how to integrate Bootstrap with javascript and to give you a quick, lightweight option for the basic functionality right away.

    +

    For more information and to see some live demos, please refer to our plugin documentation page.

    +
    +
    + + + +
    + +
    +
    +

    Bootstrap was built from Preboot, an open-source pack of mixins and variables to be used in conjunction with Less, a CSS preprocessor for faster and easier web development.

    +

    Check out how we used Preboot in Bootstrap and how you can make use of it should you choose to run Less on your next project.

    +
    +
    +

    How to use it

    +

    Use this option to make full use of Bootstrap’s Less variables, mixins, and nesting in CSS via javascript in your browser.

    +
    +<link rel="stylesheet/less" href="less/bootstrap.less" media="all" />
    +<script src="js/less-1.1.4.min.js"></script>
    +

    Not feeling the .js solution? Try the Less Mac app or use Node.js to compile when you deploy your code.

    + +

    What’s included

    +

    Here are some of the highlights of what’s included in Twitter Bootstrap as part of Bootstrap. Head over to the Bootstrap website or Github project page to download and learn more.

    +

    Variables

    +

    Variables in Less are perfect for maintaining and updating your CSS headache free. When you want to change a color value or a frequently used value, update it in one spot and you’re set.

    +
    +// Links
    +@linkColor:         #8b59c2;
    +@linkColorHover:    darken(@linkColor, 10);
    +
    +// Grays
    +@black:             #000;
    +@grayDark:          lighten(@black, 25%);
    +@gray:              lighten(@black, 50%);
    +@grayLight:         lighten(@black, 70%);
    +@grayLighter:       lighten(@black, 90%);
    +@white:             #fff;
    +
    +// Accent Colors
    +@blue:              #08b5fb;
    +@green:             #46a546;
    +@red:               #9d261d;
    +@yellow:            #ffc40d;
    +@orange:            #f89406;
    +@pink:              #c3325f;
    +@purple:            #7a43b6;
    +
    +// Baseline grid
    +@basefont:          13px;
    +@baseline:          18px;
    +
    + +

    Commenting

    +

    Less also provides another style of commenting in addition to CSS’s normal /* ... */ syntax.

    +
    +// This is a comment
    +/* This is also a comment */
    +
    + +

    Mixins up the wazoo

    +

    Mixins are basically includes or partials for CSS, allowing you to combine a block of code into one. They’re great for vendor prefixed properties like box-shadow, cross-browser gradients, font stacks, and more. Below is a sample of the mixins that are included with Bootstrap.

    +

    Font stacks

    +
    +#font {
    +  .shorthand(@weight: normal, @size: 14px, @lineHeight: 20px) {
    +    font-size: @size;
    +    font-weight: @weight;
    +    line-height: @lineHeight;
    +  }
    +  .sans-serif(@weight: normal, @size: 14px, @lineHeight: 20px) {
    +    font-family: "Helvetica Neue", Helvetica, Arial, sans-serif;
    +    font-size: @size;
    +    font-weight: @weight;
    +    line-height: @lineHeight;
    +  }
    +  ...
    +}
    +
    +

    Gradients

    +
    +#gradient {
    +  ...
    +  .vertical (@startColor: #555, @endColor: #333) {
    +    background-color: @endColor;
    +    background-repeat: repeat-x;
    +    background-image: -khtml-gradient(linear, left top, left bottom, from(@startColor), to(@endColor)); // Konqueror
    +    background-image: -moz-linear-gradient(@startColor, @endColor); // FF 3.6+
    +    background-image: -ms-linear-gradient(@startColor, @endColor); // IE10
    +    background-image: -webkit-gradient(linear, left top, left bottom, color-stop(0%, @startColor), color-stop(100%, @endColor)); // Safari 4+, Chrome 2+
    +    background-image: -webkit-linear-gradient(@startColor, @endColor); // Safari 5.1+, Chrome 10+
    +    background-image: -o-linear-gradient(@startColor, @endColor); // Opera 11.10
    +    background-image: linear-gradient(@startColor, @endColor); // The standard
    +  }
    +  ...
    +}
    +
    + +

    Operations

    +

    Get fancy and perform some math to generate flexible and powerful mixins like the one below.

    +
    +// Griditude
    +@gridColumns:       16;
    +@gridColumnWidth:   40px;
    +@gridGutterWidth:   20px;
    +@siteWidth:         (@gridColumns * @gridColumnWidth) + (@gridGutterWidth * (@gridColumns - 1));
    +
    +// Make some columns
    +.columns(@columnSpan: 1) {
    +  width: (@gridColumnWidth * @columnSpan) + (@gridGutterWidth * (@columnSpan - 1));
    +}
    +
    + +

    Compiling Less

    +

    After modifying the .less files in /lib/, you'll need to recompile them in order to regenerate the bootstrap-*.*.*.css and bootstrap-*.*.*.min.css files. If you're submitting a pull request to GitHub, you must always recompile.

    +

    Ways to compile

    + + + + + + + + + + + + + + + + + + + + + + + + +
    MethodSteps
    Node with makefile +

    Install the less command line compiler with npm by running the following command:

    +
    $ npm install lessc
    +

    Once installed just run make from the root of your bootstrap directory and you're all set.

    +

    Additionally, if you have watchr installed, you may run make watch to have bootstrap automatically rebuilt every time you edit a file in the bootstrap lib (this isn't required, just a convenience method).

    +
    Javascript +

    Download the latest Less.js and include the path to it (and Bootstrap) in the head.

    +
    +<link rel="stylesheet/less" href="/path/to/bootstrap.less">
    +<script src="/path/to/less.js"></script>
    +
    +

    To recompile the .less files, just save them and reload your page. Less.js compiles them and stores them in local storage.

    +
    Command line +

    If you already have the less command line tool installed, simply run the following command:

    +
    $ lessc ./lib/bootstrap.less > bootstrap.css
    +

    Be sure to include --compress in that command if you're trying to save some bytes!

    +
    Less Mac app +

    The unofficial Mac app watches directories of .less files and compiles the code to local files after every save of a watched .less file.

    +

    If you like, you can toggle preferences in the app for automatic minifying and which directory the compiled files end up in.

    +
    +
    +
    + +
    + + + +
    + + + + + + + + + + + + + + + + diff --git a/src/dashboard/src/media/vendor/twitter-bootstrap/docs/javascript.html b/src/dashboard/src/media/vendor/twitter-bootstrap/docs/javascript.html new file mode 100644 index 0000000000..9176ff6980 --- /dev/null +++ b/src/dashboard/src/media/vendor/twitter-bootstrap/docs/javascript.html @@ -0,0 +1,798 @@ + + + + + Bootstrap, from Twitter + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    +
    +
    +

    Javascript for Bootstrap

    +

    + Bring Bootstrap's components to life with new, custom plugins that work with jQuery and Ender. +

    +

    ← Back to Bootstrap home

    +
    +
    +
    + +
    + + + + + + + + + + + + +
    + +
    +
    +

    This plugin is for adding the scrollspy (auto updating nav) interaction to the bootstrap topbar.

    + Download +
    +
    +

    Using bootstrap-scrollspy.js

    +
    $('#topbar').scrollSpy()
    +

    Markup

    +

    To easily add scrollspy behavior to your nav, just add the data-scrollspy attribute to the .topbar. +

    <div class="topbar" data-scrollspy="scrollspy" >...</div>
    +

    Methods

    +

    $().scrollSpy()

    +

    + Auto activates navigation buttons by users scroll position. +

    +
    $('body > .topbar').scrollSpy()
    +

    Notice Topbar anchor tags must have resolvable id targets. For example, a <a href="#home">home</a> must correspond to something in the dom like <div id="home"></div>. +

    +

    .scrollSpy('refresh')

    +

    The scrollspy caches nav buttons and section coordinates for performance. If you need to update this cache (likely if you have dynamic content) just call this refresh method. If you used the data attribute to define your scrollspy, just call refresh on the body.

    +
    $('body').scrollSpy('refresh')
    +

    Demo

    +

    Checkout the the topbar navigation on this page.

    +
    +
    +
    + + + +
    + +
    +
    +

    This plugin offers additional functionality for managing button state.

    + Download +
    +
    +

    Using bootstrap-buttons.js

    +
    $('.tabs').button()
    +

    Methods

    +

    $().button('toggle')

    +

    Toggles push state. Gives btn the look that it's been activated.

    +

    Notice You can enable auto toggling of a button by using the data-toggle attribute.

    +
    <button class="btn" data-toggle="toggle" >...</button>
    +

    $().button('loading')

    +

    Sets button state to loading - disables button and swaps text to loading text. Loading text should be defined on the button element using the data attribute data-loading-text. +

    +
    <button class="btn" data-loading-text="loading stuff..." >...</button>
    +

    $().button('reset')

    +

    Resets button state - swaps text to original text.

    +

    $().button(string)

    +

    Resets button state - swaps text to any data defined text state.

    +
    <button class="btn" data-complete-text="finished!" >...</button>
    +<script>
    +  $('.btn').button('complete')
    +</scrip>
    +

    Demo

    + + + +
    +
    +
    + + + + +
    + +
    +
    +

    This plugin adds quick, dynamic tab and pill functionality.

    + Download +
    +
    +

    Using bootstrap-tabs.js

    +
    $('.tabs').tabs()
    +

    Markup

    +

    You can activate a tab or pill navigation without writing any javascript by simply giving them a data-tabs or data-pills attribute.

    +
     <ul class="tabs" data-tabs="tabs" >...</ul>
    +

    Methods

    +

    $().tabs or $().pills

    +

    + Activates tab and pill functionality for a given container. Tab links should reference id's in the document. +

    +
    +<ul class="tabs">
    +  <li class="active"><a href="#home">Home</a></li>
    +  <li><a href="#profile">Profile</a></li>
    +  <li><a href="#messages">Messages</a></li>
    +  <li><a href="#settings">Settings</a></li>
    +</ul>
    +
    +<div class="pill-content">
    +  <div class="active" id="home">...</div>
    +  <div id="profile">...</div>
    +  <div id="messages">...</div>
    +  <div id="settings">...</div>
    +</div>
    +
    +<script>
    +  $(function () {
    +    $('.tabs').tabs()
    +  })
    +</script>
    +

    +

    Events

    + + + + + + + + + + + + + +
    EventDescription
    changeThis event fires on tab change. Use event.target and event.relatedTarget to target the active tab and the previous active tab respectively.
    + +
    +$('#.tabs').bind('change', function (e) {
    +  e.target // activated tab
    +  e.relatedTarget // previous tab
    +})
    +

    Demo

    + +
    +
    +

    Raw denim you probably haven't heard of them jean shorts Austin. Nesciunt tofu stumptown aliqua, retro synth master cleanse. Mustache cliche tempor, williamsburg carles vegan helvetica. Reprehenderit butcher retro keffiyeh dreamcatcher synth. Cosby sweater eu banh mi, qui irure terry richardson ex squid. Aliquip placeat salvia cillum iphone. Seitan aliquip quis cardigan american apparel, butcher voluptate nisi qui.

    +
    +
    +

    Food truck fixie locavore, accusamus mcsweeney's marfa nulla single-origin coffee squid. Exercitation +1 labore velit, blog sartorial PBR leggings next level wes anderson artisan four loko farm-to-table craft beer twee. Qui photo booth letterpress, commodo enim craft beer mlkshk aliquip jean shorts ullamco ad vinyl cillum PBR. Homo nostrud organic, assumenda labore aesthetic magna delectus mollit. Keytar helvetica VHS salvia yr, vero magna velit sapiente labore stumptown. Vegan fanny pack odio cillum wes anderson 8-bit, sustainable jean shorts beard ut DIY ethical culpa terry richardson biodiesel. Art party scenester stumptown, tumblr butcher vero sint qui sapiente accusamus tattooed echo park.

    +
    +
    +

    Banksy do proident, brooklyn photo booth delectus sunt artisan sed organic exercitation eiusmod four loko. Quis tattooed iphone esse aliqua. Master cleanse vero fixie mcsweeney's. Ethical portland aute, irony food truck pitchfork lomo eu anim. Aesthetic blog DIY, ethical beard leggings tofu consequat whatever cardigan nostrud. Helvetica you probably haven't heard of them carles, marfa veniam occaecat lomo before they sold out in shoreditch scenester sustainable thundercats. Consectetur tofu craft beer, mollit brunch fap echo park pitchfork mustache dolor.

    +
    +
    +

    Sunt qui biodiesel mollit officia, fanny pack put a bird on it thundercats seitan squid ad wolf bicycle rights blog. Et aute readymade farm-to-table carles 8-bit, nesciunt nulla etsy adipisicing organic ea. Master cleanse mollit high life, next level Austin nesciunt american apparel twee mustache adipisicing reprehenderit hoodie portland irony. Aliqua tofu quinoa +1 commodo eiusmod. High life williamsburg cupidatat twee homo leggings. Four loko vinyl DIY consectetur nisi, marfa retro keffiyeh vegan. Fanny pack viral retro consectetur gentrify fap.

    +
    +
    +

    Etsy mixtape wayfarers, ethical wes anderson tofu before they sold out mcsweeney's organic lomo retro fanny pack lo-fi farm-to-table readymade. Messenger bag gentrify pitchfork tattooed craft beer, iphone skateboard locavore carles etsy salvia banksy hoodie helvetica. DIY synth PBR banksy irony. Leggings gentrify squid 8-bit cred pitchfork. Williamsburg banh mi whatever gluten-free, carles pitchfork biodiesel fixie etsy retro mlkshk vice blog. Scenester cred you probably haven't heard of them, vinyl craft beer blog stumptown. Pitchfork sustainable tofu synth chambray yr.

    +
    +
    +

    Trust fund seitan letterpress, keytar raw denim keffiyeh etsy art party before they sold out master cleanse gluten-free squid scenester freegan cosby sweater. Fanny pack portland seitan DIY, art party locavore wolf cliche high life echo park Austin. Cred vinyl keffiyeh DIY salvia PBR, banh mi before they sold out farm-to-table VHS viral locavore cosby sweater. Lomo wolf viral, mustache readymade thundercats keffiyeh craft beer marfa ethical. Wolf salvia freegan, sartorial keffiyeh echo park vegan.

    +
    +
    +
    +
    +
    + + + +
    + +
    +
    +

    Based on the excellent jQuery.tipsy plugin written by Jason Frame; twipsy is an updated version, which doesn't rely on images, uses css3 for animations, and data-attributes for title storage!

    + Download +
    +
    +

    Using bootstrap-twipsy.js

    +
    $('#example').twipsy(options)
    +

    Options

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Nametypedefaultdescription
    animatebooleantrueapply a css fade transition to the tooltip
    delayInnumber0delay before showing tooltip (ms)
    delayOutnumber0delay before hiding tooltip (ms)
    fallbackstring''text to use when no tooltip title is present
    placementstring'above'how to position the tooltip - above | below | left | right
    htmlbooleanfalseallows html content within tooltip
    livebooleanfalseuse event delegation instead of individual event handlers
    offsetnumber0pixel offset of tooltip from target element
    titlestring, function'title'attribute or method for retrieving title text
    triggerstring'hover'how tooltip is triggered - hover | focus | manual
    templatestring[default markup]The html template used for rendering a twipsy.
    +

    Notice Individual twipsy instance options can alternatively be specified through the use of data attributes.

    +
    <a href="#" data-placement="below" rel='twipsy' title='Some title text'>text</a>
    +

    Methods

    +

    $().twipsy(options)

    +

    Attaches a twipsy handler to an element collection.

    +

    .twipsy('show')

    +

    Reveals an elements twipsy.

    +
    $('#element').twipsy('show')
    +

    .twipsy('hide')

    +

    Hides an elements twipsy.

    +
    $('#element').twipsy('hide')
    +

    .twipsy(true)

    +

    Returns an elements twipsy class instance.

    +
    $('#element').twipsy(true)
    +

    Notice Alternatively, this can be retrieved with $().data('twipsy').

    +

    Demo

    +
    +

    Tight pants next level keffiyeh you probably haven't heard of them. Photo booth beard raw denim letterpress vegan messenger bag stumptown. Farm-to-table seitan, mcsweeney's fixie sustainable quinoa 8-bit american apparel have a terry richardson vinyl chambray. Beard stumptown, cardigans banh mi lomo thundercats. Tofu biodiesel williamsburg marfa, four loko mcsweeney's cleanse vegan chambray. A really ironic artisan whatever keytar, scenester farm-to-table banksy Austin twitter handle freegan cred raw denim single-origin coffee viral. +

    +
    + +
    +
    +
    + + + +
    + +
    +
    +

    The popover plugin provides a simple interface for adding popovers to your application. It extends the bootstrap-twipsy.js plugin, so be sure to grab that file as well when including popovers in your project!

    +

    Notice You must include the bootstrap-twipsy.js file before bootstrap-popover.js.

    + Download +
    +
    +

    Using bootstrap-popover.js

    +
    $('#example').popover(options)
    +

    Options

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Nametypedefaultdescription
    animatebooleantrueapply a css fade transition to the tooltip
    delayInnumber0delay before showing tooltip (ms)
    delayOutnumber0delay before hiding tooltip (ms)
    fallbackstring''text to use when no tooltip title is present
    placementstring'right'how to position the tooltip - above | below | left | right
    htmlbooleanfalseallows html content within tooltip
    livebooleanfalseuse event delegation instead of individual event handlers
    offsetnumber0pixel offset of tooltip from target element
    titlestring, function'title'attribute or method for retrieving title text
    contentstring, function'data-content'a string or method for retrieving content text. if none are provided, content will be sourced from a data-content attribute.
    triggerstring'hover'how tooltip is triggered - hover | focus | manual
    templatestring[default markup]The html template used for rendering a popover.
    +

    Notice Individual popover instance options can alternatively be specified through the use of data attributes.

    +
    <a data-placement="below" href="#" class="btn danger" rel="popover">text</a>
    +

    Methods

    +

    $().popover(options)

    +

    Initializes popovers for an element collection.

    +

    .popover('show')

    +

    Reveals an elements popover.

    +
    $('#element').popover('show')
    +

    .popover('hide')

    +

    Hides an elements popover.

    +
    $('#element').popover('hide')
    +

    Demo

    + hover for popover + +
    +
    +
    + + + + +
    + +
    +
    +

    The alert plugin is a super tiny class for adding close functionality to alerts.

    + Download +
    +
    +

    Using bootstrap-alerts.js

    +
    $(".alert-message").alert()
    +

    Markup

    +

    Just add a data-alert attribute to your alert messages to automatically give them close functionality.

    +

    Options

    + + + + + + + + + + + + + + + + + +
    Nametypedefaultdescription
    selectorstring'.close'What selector to target for closing an alert.
    + +

    Methods

    +

    $().alert()

    +

    Wraps all alerts with close functionality. To have your alerts animate out when closed, make sure they have the .fade and .in class already applied to them.

    +

    .alert('close')

    +

    Closes an alert.

    +
    $(".alert-message").alert('close')
    +

    Demo

    +
    + × +

    Holy guacamole! Best check yo self, you’re not looking too good.

    +
    +
    + × +

    Oh snap! You got an error! Change this and that and try again. Duis mollis, est non commodo luctus, nisi erat porttitor ligula, eget lacinia odio sem nec elit. Cras mattis consectetur purus sit amet fermentum.

    + +
    +
    +
    +
    + + +
    + + + + + diff --git a/src/dashboard/src/media/vendor/twitter-bootstrap/examples/container-app.html b/src/dashboard/src/media/vendor/twitter-bootstrap/examples/container-app.html new file mode 100644 index 0000000000..3c371a8c4a --- /dev/null +++ b/src/dashboard/src/media/vendor/twitter-bootstrap/examples/container-app.html @@ -0,0 +1,119 @@ + + + + + Bootstrap, from Twitter + + + + + + + + + + + + + + + + + + + +
    +
    +
    + Project name + +
    + + + +
    +
    +
    +
    + +
    + +
    + +
    +
    +

    Main content

    +
    +
    +

    Secondary content

    +
    +
    +
    + +
    +

    © Company 2011

    +
    + +
    + + + diff --git a/src/dashboard/src/media/vendor/twitter-bootstrap/examples/fluid.html b/src/dashboard/src/media/vendor/twitter-bootstrap/examples/fluid.html new file mode 100644 index 0000000000..b8405cea16 --- /dev/null +++ b/src/dashboard/src/media/vendor/twitter-bootstrap/examples/fluid.html @@ -0,0 +1,122 @@ + + + + + Bootstrap, from Twitter + + + + + + + + + + + + + + + + + + + +
    +
    +
    + Project name + +

    Logged in as username

    +
    +
    +
    + +
    + +
    + +
    +

    Hello, world!

    +

    Vestibulum id ligula porta felis euismod semper. Integer posuere erat a ante venenatis dapibus posuere velit aliquet. Duis mollis, est non commodo luctus, nisi erat porttitor ligula, eget lacinia odio sem nec elit.

    +

    Learn more »

    +
    + +
    +
    +

    Heading

    +

    Etiam porta sem malesuada magna mollis euismod. Integer posuere erat a ante venenatis dapibus posuere velit aliquet. Aenean eu leo quam. Pellentesque ornare sem lacinia quam venenatis vestibulum. Duis mollis, est non commodo luctus, nisi erat porttitor ligula, eget lacinia odio sem nec elit.

    +

    View details »

    +
    +
    +

    Heading

    +

    Donec id elit non mi porta gravida at eget metus. Fusce dapibus, tellus ac cursus commodo, tortor mauris condimentum nibh, ut fermentum massa justo sit amet risus. Etiam porta sem malesuada magna mollis euismod. Donec sed odio dui.

    +

    View details »

    +
    +
    +

    Heading

    +

    Donec sed odio dui. Cras justo odio, dapibus ac facilisis in, egestas eget quam. Vestibulum id ligula porta felis euismod semper. Fusce dapibus, tellus ac cursus commodo, tortor mauris condimentum nibh, ut fermentum massa justo sit amet risus.

    +

    View details »

    +
    +
    +
    + +
    +
    +

    Heading

    +

    Etiam porta sem malesuada magna mollis euismod. Integer posuere erat a ante venenatis dapibus posuere velit aliquet. Aenean eu leo quam. Pellentesque ornare sem lacinia quam venenatis vestibulum. Duis mollis, est non commodo luctus, nisi erat porttitor ligula, eget lacinia odio sem nec elit.

    +

    View details »

    +
    +
    +

    Heading

    +

    Donec id elit non mi porta gravida at eget metus. Fusce dapibus, tellus ac cursus commodo, tortor mauris condimentum nibh, ut fermentum massa justo sit amet risus. Etiam porta sem malesuada magna mollis euismod. Donec sed odio dui.

    +

    View details »

    +
    +
    +

    Heading

    +

    Donec sed odio dui. Cras justo odio, dapibus ac facilisis in, egestas eget quam. Vestibulum id ligula porta felis euismod semper. Fusce dapibus, tellus ac cursus commodo, tortor mauris condimentum nibh, ut fermentum massa justo sit amet risus.

    +

    View details »

    +
    +
    +
    +

    © Company 2011

    +
    +
    +
    + + + \ No newline at end of file diff --git a/src/dashboard/src/media/vendor/twitter-bootstrap/examples/hero.html b/src/dashboard/src/media/vendor/twitter-bootstrap/examples/hero.html new file mode 100644 index 0000000000..49131d294c --- /dev/null +++ b/src/dashboard/src/media/vendor/twitter-bootstrap/examples/hero.html @@ -0,0 +1,79 @@ + + + + + Bootstrap, from Twitter + + + + + + + + + + + + + + + + + + + +
    +
    +
    + Project name + +
    +
    +
    + +
    + + +
    +

    Hello, world!

    +

    Vestibulum id ligula porta felis euismod semper. Integer posuere erat a ante venenatis dapibus posuere velit aliquet. Duis mollis, est non commodo luctus, nisi erat porttitor ligula, eget lacinia odio sem nec elit.

    +

    Learn more »

    +
    + + +
    +
    +

    Heading

    +

    Etiam porta sem malesuada magna mollis euismod. Integer posuere erat a ante venenatis dapibus posuere velit aliquet. Aenean eu leo quam. Pellentesque ornare sem lacinia quam venenatis vestibulum. Duis mollis, est non commodo luctus, nisi erat porttitor ligula, eget lacinia odio sem nec elit.

    +

    View details »

    +
    +
    +

    Heading

    +

    Donec id elit non mi porta gravida at eget metus. Fusce dapibus, tellus ac cursus commodo, tortor mauris condimentum nibh, ut fermentum massa justo sit amet risus. Etiam porta sem malesuada magna mollis euismod. Donec sed odio dui.

    +

    View details »

    +
    +
    +

    Heading

    +

    Donec sed odio dui. Cras justo odio, dapibus ac facilisis in, egestas eget quam. Vestibulum id ligula porta felis euismod semper. Fusce dapibus, tellus ac cursus commodo, tortor mauris condimentum nibh, ut fermentum massa justo sit amet risus.

    +

    View details »

    +
    +
    + +
    +

    © Company 2011

    +
    + +
    + + + diff --git a/src/dashboard/src/media/vendor/twitter-bootstrap/js/bootstrap-alerts.js b/src/dashboard/src/media/vendor/twitter-bootstrap/js/bootstrap-alerts.js new file mode 100644 index 0000000000..37bb430aa2 --- /dev/null +++ b/src/dashboard/src/media/vendor/twitter-bootstrap/js/bootstrap-alerts.js @@ -0,0 +1,113 @@ +/* ========================================================== + * bootstrap-alerts.js v1.4.0 + * http://twitter.github.com/bootstrap/javascript.html#alerts + * ========================================================== + * Copyright 2011 Twitter, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ========================================================== */ + + +!function( $ ){ + + "use strict" + + /* CSS TRANSITION SUPPORT (https://gist.github.com/373874) + * ======================================================= */ + + var transitionEnd + + $(document).ready(function () { + + $.support.transition = (function () { + var thisBody = document.body || document.documentElement + , thisStyle = thisBody.style + , support = thisStyle.transition !== undefined || thisStyle.WebkitTransition !== undefined || thisStyle.MozTransition !== undefined || thisStyle.MsTransition !== undefined || thisStyle.OTransition !== undefined + return support + })() + + // set CSS transition event type + if ( $.support.transition ) { + transitionEnd = "TransitionEnd" + if ( $.browser.webkit ) { + transitionEnd = "webkitTransitionEnd" + } else if ( $.browser.mozilla ) { + transitionEnd = "transitionend" + } else if ( $.browser.opera ) { + transitionEnd = "oTransitionEnd" + } + } + + }) + + /* ALERT CLASS DEFINITION + * ====================== */ + + var Alert = function ( content, options ) { + this.settings = $.extend({}, $.fn.alert.defaults, options) + this.$element = $(content) + .delegate(this.settings.selector, 'click', this.close) + } + + Alert.prototype = { + + close: function (e) { + var $element = $(this).parent('.alert-message') + + e && e.preventDefault() + $element.removeClass('in') + + function removeElement () { + $element.remove() + } + + $.support.transition && $element.hasClass('fade') ? 
+ $element.bind(transitionEnd, removeElement) : + removeElement() + } + + } + + + /* ALERT PLUGIN DEFINITION + * ======================= */ + + $.fn.alert = function ( options ) { + + if ( options === true ) { + return this.data('alert') + } + + return this.each(function () { + var $this = $(this) + + if ( typeof options == 'string' ) { + return $this.data('alert')[options]() + } + + $(this).data('alert', new Alert( this, options )) + + }) + } + + $.fn.alert.defaults = { + selector: '.close' + } + + $(document).ready(function () { + new Alert($('body'), { + selector: '.alert-message[data-alert] .close' + }) + }) + +}( window.jQuery || window.ender ); \ No newline at end of file diff --git a/src/dashboard/src/media/vendor/twitter-bootstrap/js/bootstrap-buttons.js b/src/dashboard/src/media/vendor/twitter-bootstrap/js/bootstrap-buttons.js new file mode 100644 index 0000000000..16fa161cb9 --- /dev/null +++ b/src/dashboard/src/media/vendor/twitter-bootstrap/js/bootstrap-buttons.js @@ -0,0 +1,62 @@ +/* ============================================================ + * bootstrap-buttons.js v1.4.0 + * http://twitter.github.com/bootstrap/javascript.html#buttons + * ============================================================ + * Copyright 2011 Twitter, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================ */ + +!function( $ ){ + + "use strict" + + function setState(el, state) { + var d = 'disabled' + , $el = $(el) + , data = $el.data() + + state = state + 'Text' + data.resetText || $el.data('resetText', $el.html()) + + $el.html( data[state] || $.fn.button.defaults[state] ) + + state == 'loadingText' ? + $el.addClass(d).attr(d, d) : + $el.removeClass(d).removeAttr(d) + } + + function toggle(el) { + $(el).toggleClass('active') + } + + $.fn.button = function(options) { + return this.each(function () { + if (options == 'toggle') { + return toggle(this) + } + options && setState(this, options) + }) + } + + $.fn.button.defaults = { + loadingText: 'loading...' + } + + $(function () { + $('body').delegate('.btn[data-toggle]', 'click', function () { + $(this).button('toggle') + }) + }) + +}( window.jQuery || window.ender ); \ No newline at end of file diff --git a/src/dashboard/src/media/vendor/twitter-bootstrap/js/bootstrap-dropdown.js b/src/dashboard/src/media/vendor/twitter-bootstrap/js/bootstrap-dropdown.js new file mode 100644 index 0000000000..fda6da597e --- /dev/null +++ b/src/dashboard/src/media/vendor/twitter-bootstrap/js/bootstrap-dropdown.js @@ -0,0 +1,55 @@ +/* ============================================================ + * bootstrap-dropdown.js v1.4.0 + * http://twitter.github.com/bootstrap/javascript.html#dropdown + * ============================================================ + * Copyright 2011 Twitter, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================ */ + + +!function( $ ){ + + "use strict" + + /* DROPDOWN PLUGIN DEFINITION + * ========================== */ + + $.fn.dropdown = function ( selector ) { + return this.each(function () { + $(this).delegate(selector || d, 'click', function (e) { + var li = $(this).parent('li') + , isActive = li.hasClass('open') + + clearMenus() + !isActive && li.toggleClass('open') + return false + }) + }) + } + + /* APPLY TO STANDARD DROPDOWN ELEMENTS + * =================================== */ + + var d = 'a.menu, .dropdown-toggle' + + function clearMenus() { + $(d).parent('li').removeClass('open') + } + + $(function () { + $('html').bind("click", clearMenus) + $('body').dropdown( '[data-dropdown] a.menu, [data-dropdown] .dropdown-toggle' ) + }) + +}( window.jQuery || window.ender ); diff --git a/src/dashboard/src/media/vendor/twitter-bootstrap/js/bootstrap-modal.js b/src/dashboard/src/media/vendor/twitter-bootstrap/js/bootstrap-modal.js new file mode 100644 index 0000000000..b328217f81 --- /dev/null +++ b/src/dashboard/src/media/vendor/twitter-bootstrap/js/bootstrap-modal.js @@ -0,0 +1,260 @@ +/* ========================================================= + * bootstrap-modal.js v1.4.0 + * http://twitter.github.com/bootstrap/javascript.html#modal + * ========================================================= + * Copyright 2011 Twitter, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * ========================================================= */ + + +!function( $ ){ + + "use strict" + + /* CSS TRANSITION SUPPORT (https://gist.github.com/373874) + * ======================================================= */ + + var transitionEnd + + $(document).ready(function () { + + $.support.transition = (function () { + var thisBody = document.body || document.documentElement + , thisStyle = thisBody.style + , support = thisStyle.transition !== undefined || thisStyle.WebkitTransition !== undefined || thisStyle.MozTransition !== undefined || thisStyle.MsTransition !== undefined || thisStyle.OTransition !== undefined + return support + })() + + // set CSS transition event type + if ( $.support.transition ) { + transitionEnd = "TransitionEnd" + if ( $.browser.webkit ) { + transitionEnd = "webkitTransitionEnd" + } else if ( $.browser.mozilla ) { + transitionEnd = "transitionend" + } else if ( $.browser.opera ) { + transitionEnd = "oTransitionEnd" + } + } + + }) + + + /* MODAL PUBLIC CLASS DEFINITION + * ============================= */ + + var Modal = function ( content, options ) { + this.settings = $.extend({}, $.fn.modal.defaults, options) + this.$element = $(content) + .delegate('.close', 'click.modal', $.proxy(this.hide, this)) + + if ( this.settings.show ) { + this.show() + } + + return this + } + + Modal.prototype = { + + toggle: function () { + return this[!this.isShown ? 'show' : 'hide']() + } + + , show: function () { + var that = this + this.isShown = true + this.$element.trigger('show') + + escape.call(this) + backdrop.call(this, function () { + var transition = $.support.transition && that.$element.hasClass('fade') + + that.$element + .appendTo(document.body) + .show() + + if (transition) { + that.$element[0].offsetWidth // force reflow + } + + that.$element.addClass('in') + + transition ? 
+ that.$element.one(transitionEnd, function () { that.$element.trigger('shown') }) : + that.$element.trigger('shown') + + }) + + return this + } + + , hide: function (e) { + e && e.preventDefault() + + if ( !this.isShown ) { + return this + } + + var that = this + this.isShown = false + + escape.call(this) + + this.$element + .trigger('hide') + .removeClass('in') + + $.support.transition && this.$element.hasClass('fade') ? + hideWithTransition.call(this) : + hideModal.call(this) + + return this + } + + } + + + /* MODAL PRIVATE METHODS + * ===================== */ + + function hideWithTransition() { + // firefox drops transitionEnd events :{o + var that = this + , timeout = setTimeout(function () { + that.$element.unbind(transitionEnd) + hideModal.call(that) + }, 500) + + this.$element.one(transitionEnd, function () { + clearTimeout(timeout) + hideModal.call(that) + }) + } + + function hideModal (that) { + this.$element + .hide() + .trigger('hidden') + + backdrop.call(this) + } + + function backdrop ( callback ) { + var that = this + , animate = this.$element.hasClass('fade') ? 'fade' : '' + if ( this.isShown && this.settings.backdrop ) { + var doAnimate = $.support.transition && animate + + this.$backdrop = $('