diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..96ef6c0b944e24fc22f51f18136cd62ffd5b0b8f --- /dev/null +++ b/.gitignore @@ -0,0 +1,2 @@ +/target +Cargo.lock diff --git a/COPYING b/COPYING new file mode 100644 index 0000000000000000000000000000000000000000..be3f7b28e564e7dd05eaf59d64adba1a4065ac0e --- /dev/null +++ b/COPYING @@ -0,0 +1,661 @@ + GNU AFFERO GENERAL PUBLIC LICENSE + Version 3, 19 November 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/> + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU Affero General Public License is a free, copyleft license for +software and other kinds of works, specifically designed to ensure +cooperation with the community in the case of network server software. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +our General Public Licenses are intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + Developers that use our General Public Licenses protect your rights +with two steps: (1) assert copyright on the software, and (2) offer +you this License which gives you legal permission to copy, distribute +and/or modify the software. 
+ + A secondary benefit of defending all users' freedom is that +improvements made in alternate versions of the program, if they +receive widespread use, become available for other developers to +incorporate. Many developers of free software are heartened and +encouraged by the resulting cooperation. However, in the case of +software used on network servers, this result may fail to come about. +The GNU General Public License permits making a modified version and +letting the public access it on a server without ever releasing its +source code to the public. + + The GNU Affero General Public License is designed specifically to +ensure that, in such cases, the modified source code becomes available +to the community. It requires the operator of a network server to +provide the source code of the modified version running there to the +users of that server. Therefore, public use of a modified version, on +a publicly accessible server, gives the public access to the source +code of the modified version. + + An older license, called the Affero General Public License and +published by Affero, was designed to accomplish similar goals. This is +a different license, not a version of the Affero GPL, but Affero has +released a new version of the Affero GPL which permits relicensing under +this license. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU Affero General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. 
The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. 
+ + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. 
The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. 
+ + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. 
+ + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. 
This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. 
For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. 
Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. 
+ + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. 
Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. 
+ + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. 
+ + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. 
You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Remote Network Interaction; Use with the GNU General Public License. 
+ + Notwithstanding any other provision of this License, if you modify the +Program, your modified version must prominently offer all users +interacting with it remotely through a computer network (if your version +supports such interaction) an opportunity to receive the Corresponding +Source of your version by providing access to the Corresponding Source +from a network server at no charge, through some standard or customary +means of facilitating copying of software. This Corresponding Source +shall include the Corresponding Source for any work covered by version 3 +of the GNU General Public License that is incorporated pursuant to the +following paragraph. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the work with which it is combined will remain governed by version +3 of the GNU General Public License. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU Affero General Public License from time to time. Such new versions +will be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU Affero General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU Affero General Public License, you may choose any version ever published +by the Free Software Foundation. 
+ + If the Program specifies that a proxy can decide which future +versions of the GNU Affero General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. 
+ + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + <one line to give the program's name and a brief idea of what it does.> + Copyright (C) <year> <name of author> + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU Affero General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU Affero General Public License for more details. + + You should have received a copy of the GNU Affero General Public License + along with this program. If not, see <https://www.gnu.org/licenses/>. + +Also add information on how to contact you by electronic and paper mail. + + If your software can interact with users remotely through a computer +network, you should also make sure that it provides a way for users to +get its source. 
For example, if your program is a web application, its +interface could display a "Source" link that leads users to an archive +of the code. There are many ways you could offer source, and different +solutions will be better for different programs; see section 13 for the +specific requirements. + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU AGPL, see +<https://www.gnu.org/licenses/>. diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 0000000000000000000000000000000000000000..f73ef38555e6352a9a5473b7be7cc8621100baaa --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,105 @@ +[package] +name = "mc_schema" +version = "0.28.5" +authors = ["Jan Wetzlich <jan.wetzlich@ptb.de>"] +edition = "2021" +license = "AGPL-3.0-or-later" +rust-version = "1.62" + +[features] +default = ["call_id_tokio", "rusqlite_traits"] # disable for wasm build +default_verbose = ["dep:default_verbose", "dpsfw_types/default_verbose"] +call_id_tokio = ["tokio", "tokio-postgres","db_lib", "backend_lib", "dpsfw", "dpsfw_types/call_id_tokio"] +rusqlite_traits = ["dep:rusqlite", "dpsfw_types/rusqlite_traits"] +dpif_world = ["dpsfw/dpif_world"] +dpif_vpn = ["dpsfw/dpif_vpn"] +dpif_intra = ["dpsfw/dpif_intra"] +dpif_debug = ["dpsfw/dpif_debug"] + +[dependencies] +bytes = "^1.0" +serde = { version = "*", features = ["derive"]} +serde_json = "*" +once_cell = "^1.7" +async-trait = "*" + +#backend +strum_macros = "^0.26" +strum = "^0.26" +serde_repr = "0.1" +quick-xml = { version = "0.36.0", features = [ "serialize" ] } +regex = "^1.4" +lazy_static = "^1.2" +url = "^2.4" +percent-encoding = "2.3" + + +# utils = { git = "https://gitlab1.ptb.de/metrology-cloud/utils", tag = "v0.4.11"} +utils = { git = "https://gitlab1.ptb.de/datapolis/utils", branch = "stable" } + + +[dependencies.phf] #backend +version = "^0.11" +features = ["macros"] + 
+[dependencies.tokio] # connection to opcua-client +version = "^1.0" +optional = true +features = ["rt"] + +[dependencies.dpsfw] +git = "https://gitlab1.ptb.de/datapolis/dpsfw" +branch = "stable" +# branch = "doc_friendly_routing_table" +# path = "../procs" +optional = true + +[dependencies.db_lib] +git = "https://gitlab1.ptb.de/datapolis/db" +branch = "stable" +# branch = "29-inter-database-scheme-foreign-key-relations" +# path = "../procs" +optional = true + +[dependencies.backend_lib] +git = "https://gitlab1.ptb.de/datapolis/backend" +branch = "stable" +# branch = "cummulated-procs" +# path = "../procs" +optional = true + +[dependencies.dpsfw_types] +git = "https://gitlab1.ptb.de/datapolis/dpsfw" +branch = "stable" +default-features = false +# branch = "doc_friendly_routing_table" +# path = "../procs" + +[dependencies.dp_proc_macros] +git = "https://gitlab1.ptb.de/datapolis/dpsfw" +branch = "stable" +# branch = "doc_friendly_routing_table" +# path = "../procs" + +[dependencies.default_verbose] +git = "https://gitlab1.ptb.de/datapolis/docu_maker.git" +branch = "stable" +optional = true + +[dependencies.tokio-postgres] +version = "^0.7" +optional = true + +[dependencies.postgres-types] +# version = "^0.15" +version = "^0.2" +features = ["with-time-0_3"] + +[dependencies.time] +version = "^0.3" +features = ["local-offset"] + +[dependencies.rusqlite] +version = "0.*" +features = ["uuid", "time"] +optional = true diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..83e292b6cda4122cb73c5c94837454f8705d700a --- /dev/null +++ b/LICENSE @@ -0,0 +1,15 @@ +mc_schema - schema extension to make a MC-node from a DataPolis-node +Copyright (C) 2021 jan.wetzlich@ptb.de + +This program is free software: you can redistribute it and/or modify +it under the terms of the GNU Affero General Public License as +published by the Free Software Foundation, either version 3 of the +License, or (at your option) any later version. 
+ +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU Affero General Public License for more details. + +You should have received a copy of the GNU Affero General Public License +along with this program. If not, see <https://www.gnu.org/licenses/>. diff --git a/README.md b/README.md new file mode 100644 index 0000000000000000000000000000000000000000..a53b5a773aee1f819127e263b2f66497aff5cbca --- /dev/null +++ b/README.md @@ -0,0 +1,17 @@ +# Crate Objectives +This lib-`crate` is a schema extension module/`crate` and provides domain-specific functionality. It contains the backend and db methods and IO-structs used by a MetrologyCloud-flavored DataPolis-Node. The backend module has a world/public interface so a UI or stakeholder IT system can use it. + +### Methods: + * add, edit and delete Measuring Instrument Devices + * add, edit and delete Measuring Instrument Series + * query Measuring Instrument Devices for software version, software hash and initiate software updates + * basic mocked-up processes like Software-Update, Reverification and grouped Reverification + +A running backend/db service would include the router-`fn`s defined here. Additional Postgres tables are set up by the SQL schema scripts (`sql/0x_mc_....sql`) defined here. + +# Contributors + * Jan Wetzlich + * Maximilian Dohlus + * Jasper Gräflich +# Instructions for running the code +This is a lib-`crate`, so there is no runnable `bin` here. See https://gitlab1.ptb.de/datapolis/services for a runnable service binary using this schema/domain extension `crate`/module. 
diff --git a/doc/2023-DigitalerEichantrag.xsd b/doc/2023-DigitalerEichantrag.xsd new file mode 100644 index 0000000000000000000000000000000000000000..0880f297fa14cd6523c79135302eb6b30685847a --- /dev/null +++ b/doc/2023-DigitalerEichantrag.xsd @@ -0,0 +1,720 @@ +<?xml version="1.0" encoding="utf-8"?> +<xs:schema attributeFormDefault="unqualified" elementFormDefault="qualified" xmlns:xs="http://www.w3.org/2001/XMLSchema"> +<!-- + Typendefinition Typ_Adresse +--> +<xs:complexType name="Typ_Adresse"> + <xs:sequence> + <xs:element name="Name1" nillable="false"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="35"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + <xs:element name="Name2" minOccurs="0" maxOccurs="1"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="27"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + <xs:element name="Name3" minOccurs="0" maxOccurs="1"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="40"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + <xs:element name="ISO_3166" nillable="false"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:minLength value="2"></xs:minLength> + <xs:maxLength value="2"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + <xs:element name="Strasse" nillable="false"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="22"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + <xs:element name="Hausnummer" minOccurs="0" maxOccurs="1"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="10"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + <xs:element name="Ort" nillable="false"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="40"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + <xs:element 
name="Postleitzahl" nillable="false"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="10"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + <xs:element name="Postfach" minOccurs="0" maxOccurs="1"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="10"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + <xs:element name="OrtPostfach" minOccurs="0" maxOccurs="1"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="40"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + <xs:element name="PLZPostfach" minOccurs="0" maxOccurs="1"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="10"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + <xs:element name="Breitengrad" type="xs:decimal" minOccurs="0" maxOccurs="1" /> + <xs:element name="Laengengrad" type="xs:decimal" minOccurs="0" maxOccurs="1" /> + <xs:element name="Gemeindekennziffer" type="xs:nonNegativeInteger" minOccurs="0" maxOccurs="1" /> + </xs:sequence> +</xs:complexType> + +<!-- + Typendefinition Typ_Kontakt +--> +<xs:complexType name="Typ_Kontakt"> + <xs:sequence> + <xs:element name="Ansprechpartner" nillable="false"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="100"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + <xs:element name="Telefon" minOccurs="0" maxOccurs="1"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="20"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + <xs:element name="Fax" minOccurs="0" maxOccurs="1"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="20"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + <xs:element name="Mobiltelefon" minOccurs="0" maxOccurs="1"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength 
value="20"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + <xs:element name="EMail" minOccurs="0" maxOccurs="1"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="50"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + </xs:sequence> +</xs:complexType> + +<!-- + Typendefinition Typ_Kunde +--> + + <xs:complexType name="Typ_Kunde"> + <xs:sequence> + <xs:element name="Adresse" type ="Typ_Adresse" minOccurs="1" maxOccurs="1" /> + <xs:element name="Kontakt" type ="Typ_Kontakt" minOccurs="0" maxOccurs="1" /> + </xs:sequence> +</xs:complexType> + + +<!-- + Typendefinition Typ_Antriebsart +--> +<xs:simpleType name="Typ_Antriebsart"> + <xs:restriction base="xs:string"> + <xs:enumeration value = "" /> + <xs:enumeration value = "Frontantrieb" /> + <xs:enumeration value = "Heckantrieb" /> + <xs:enumeration value = "Allradantrieb" /> + <xs:enumeration value = "Elektroantrieb" /> + </xs:restriction> +</xs:simpleType> + +<!-- + Typendefinition Typ_GeraeteIdentifikation +--> +<xs:complexType name="Typ_GeraeteIdentifikation"> + <xs:sequence> + <xs:element name="MessgeraeteArt" nillable="false" > + <xs:simpleType> + <xs:restriction base="xs:nonNegativeInteger"> + <!-- Ungültig --> + <xs:enumeration value ="0" /> + <!-- 01: Waage NSW (Klasse I oder II)--> + <xs:enumeration value ="1" /> + <!-- 11: Waage NSW (Klasse III oder IIII)--> + <xs:enumeration value ="11" /> + <!-- 12: Waage NSW (Klasse III oder IIII) für Kassensystem (POS) --> + <xs:enumeration value ="12" /> + <!-- 13: PC für Kassensystem (POS)--> + <xs:enumeration value ="13" /> + <!-- 14: Waage NSW (Klasse III oder IIII) für Kassensystem (POS & PC)--> + <xs:enumeration value ="14" /> + <!-- 15: Selbsttätige Waage --> + <xs:enumeration value ="15" /> + <!-- 21: Kraftstoffzapfanlage ohne Mengenumwerter --> + <xs:enumeration value ="21" /> + <!-- 22: Kraftstoffzapfanlage mit Mengenumwerter --> + <xs:enumeration value ="22" /> + <!-- 23: Tankwagen--> + 
<xs:enumeration value ="23" /> + <!-- 31: Abgasmessgerät für CO --> + <xs:enumeration value ="31" /> + <!-- 32: Abgasmessgerät für CO, CO2, HC, O2 --> + <xs:enumeration value ="32" /> + <!-- 33: Abgasmessgerät für Dieselruß --> + <xs:enumeration value ="33" /> + <!-- 41: Reifenluftdruckmessgerät --> + <xs:enumeration value ="41" /> + <!-- 42: Reifenluftdruckautomat --> + <xs:enumeration value ="42" /> + <!-- 51: Taxameter --> + <xs:enumeration value ="51" /> + <!-- 52: Wegstreckenzähler --> + <xs:enumeration value ="52" /> + <!-- 99: Sonstige Messgeräte--> + <xs:enumeration value ="99" /> + </xs:restriction> + </xs:simpleType> + </xs:element> + + <xs:element name="Hersteller" nillable="false" > + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="50"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + + <xs:element name="Typ" nillable="false" > + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="50"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + + <xs:element name="Identnummer" nillable="false"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="50"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + + <xs:element name="KundenIdentnummer" minOccurs="0" maxOccurs="1"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="50"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + + </xs:sequence> +</xs:complexType> + +<xs:complexType name="Typ_GeraeteBeschreibung"> + <xs:choice> + <xs:element name="Waage" type ="Typ_Geraet_Waage" /> + <xs:element name="Waage_POS" type ="Typ_Geraet_Waage_POS" /> + <xs:element name="PC_POS" type ="Typ_Geraet_PC_POS" /> + <xs:element name="Waage_PC_POS" type ="Typ_Geraet_Waage_PC_POS" /> + <xs:element name="Zapfpunkt" type ="Typ_Geraet_Zapfpunkt" /> + <xs:element name="Tankwagen" type ="Typ_Geraet_Tankwagen" /> + <xs:element name="Abgasmessgeraet" type 
="Typ_Geraet_Abgasmessgeraet" /> + <xs:element name="Reifenluftdruck" type ="Typ_Geraet_Reifenluftdruck" /> + <xs:element name="Taxameter" type ="Typ_Geraet_Taxameter" /> + <xs:element name="Wegstreckenzaehler" type ="Typ_Geraet_WSZ" /> + <xs:element name="Sonstige_Messgeraete" type ="Typ_Geraet_Sonstige" /> + </xs:choice> +</xs:complexType> + +<!-- + Typendefinition Typ_Geraet_Waage +--> + <xs:complexType name="Typ_Geraet_Waage"> + <xs:sequence> + <xs:element name="Kenngroesse" nillable="false"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:pattern value="([0-9]){1,6}(\,[0-9]){0,1} (g|kg)"/> + <xs:maxLength value="20"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + <xs:element name="Bemerkung" minOccurs="0" maxOccurs="1"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="1024"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + </xs:sequence> + </xs:complexType> + +<!-- + Typendefinition Typ_Geraet_Waage_POS +--> +<xs:complexType name="Typ_Geraet_Waage_POS"> + <xs:sequence> + <xs:element name="Kenngroesse" nillable="false"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:pattern value="([0-9]){1,5}(\,[0-9]){0,1} (g|kg)"/> + <xs:maxLength value="10"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + <xs:element name="Pruefort_Messgeraet" nillable="false"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="20"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + <xs:element name="Bemerkung" minOccurs="0" maxOccurs="1"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="1024"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + </xs:sequence> +</xs:complexType> + + + <!-- + Typendefinition Typ_Geraet_PC_POS + --> + <xs:complexType name="Typ_Geraet_PC_POS"> + <xs:sequence> + <xs:element name="PC_Modell" nillable="false"> + <xs:simpleType> + <xs:restriction 
base="xs:string"> + <xs:maxLength value="20"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + <xs:element name="PC_Seriennummer" nillable="false"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="20"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + <xs:element name="Bemerkung" minOccurs="0" maxOccurs="1"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="1024"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + </xs:sequence> + </xs:complexType> + + + <!-- + Typendefinition Typ_Geraet_Waage_PC_POS + --> + <xs:complexType name="Typ_Geraet_Waage_PC_POS"> + <xs:sequence> + <xs:element name="Kenngroesse" nillable="false"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:pattern value="([0-9]){1,5}(\,[0-9]){0,1} (g|kg)"/> + <xs:maxLength value="10"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + <xs:element name="Pruefort_Messgeraet" nillable="false"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="20"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + <xs:element name="PC_Modell" nillable="false"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="20"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + <xs:element name="PC_Seriennummer" nillable="false"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="20"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + <xs:element name="Bemerkung" minOccurs="0" maxOccurs="1"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="1024"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + </xs:sequence> + </xs:complexType> + + <!-- + Typendefinition Typ_Geraet_Zapfpunkt +--> +<xs:complexType name="Typ_Geraet_Zapfpunkt"> + <xs:sequence> + <xs:element name="Kenngroesse" nillable="false"> + 
<xs:simpleType> + <xs:restriction base="xs:string"> + <xs:pattern value="([0-9]){1,5}(\,[0-9]){0,1} (l/min)"/> + <xs:maxLength value="20"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + <xs:element name="Zapfsaeule" nillable="false"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="20"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + <xs:element name="Produkt" nillable="false"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="20"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + <xs:element name="Produktname" nillable="false"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="50"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + <xs:element name="Bemerkung" minOccurs="0" maxOccurs="1"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="1024"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + </xs:sequence> +</xs:complexType> + +<!-- + Typendefinition Typ_Geraet_Tankwagen +--> +<xs:complexType name="Typ_Geraet_Tankwagen"> + <xs:sequence> + <xs:element name="Kenngroesse" nillable="false"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:pattern value="([0-9]){1,5}(\,[0-9]){0,1} (l/min)"/> + <xs:maxLength value="20"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + <xs:element name="KFZ_Kennzeichen" nillable="false"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="12"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + <xs:element name="Produkt" nillable="false"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="20"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + <xs:element name="Produktname" nillable="false"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="50"></xs:maxLength> + 
</xs:restriction> + </xs:simpleType> + </xs:element> + <xs:element name="Bemerkung" minOccurs="0" maxOccurs="1"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="1024"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + </xs:sequence> +</xs:complexType> + +<!-- + Typendefinition Typ_Geraet_Abgasmessgeraet +--> +<xs:complexType name="Typ_Geraet_Abgasmessgeraet"> + <xs:sequence> + <xs:element name="Bemerkung" minOccurs="0" maxOccurs="1"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="1024"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + </xs:sequence> +</xs:complexType> + +<!-- + Typendefinition Typ_Geraet_Reifenluftdruck +--> +<xs:complexType name="Typ_Geraet_Reifenluftdruck"> + <xs:sequence> + <xs:element name="Bemerkung" minOccurs="0" maxOccurs="1"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="1024"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + </xs:sequence> +</xs:complexType> + + + <!-- + Typendefinition Typ_Geraet_Taxameter +--> + <xs:complexType name="Typ_Geraet_Taxameter"> + <xs:sequence> + <xs:element name="KFZ_Kennzeichen" minOccurs="0" maxOccurs="1"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="12"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + <xs:element name="Fahrgestellnummer" minOccurs="0" maxOccurs="1"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="50"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + <xs:element name="Antriebsart" nillable="false" type="Typ_Antriebsart"> + </xs:element> + <xs:element name="Tarifgebiet" nillable="false"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="100"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + <xs:element name="Ordnungsnummer" nillable="false"> + <xs:simpleType> + <xs:restriction 
base="xs:string"> + <xs:maxLength value="50"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + <xs:element name="Bemerkung" minOccurs="0" maxOccurs="1"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="1024"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + </xs:sequence> + </xs:complexType> + +<!-- + Typendefinition Typ_Geraet_WSZ +--> + <xs:complexType name="Typ_Geraet_WSZ"> + <xs:sequence> + <xs:element name="KFZ_Kennzeichen" minOccurs="0" maxOccurs="1"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="12"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + <xs:element name="Fahrgestellnummer" minOccurs="0" maxOccurs="1"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="50"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + <xs:element name="Antriebsart" nillable="false" type="Typ_Antriebsart"> + </xs:element> + <xs:element name="Bemerkung" minOccurs="0" maxOccurs="1"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="1024"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + </xs:sequence> + </xs:complexType> + +<!-- + Typendefinition Typ_Geraet_Sonstige +--> + <xs:complexType name="Typ_Geraet_Sonstige"> + <xs:sequence> + <xs:element name="Bemerkung" minOccurs="0" maxOccurs="1"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="1024"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + </xs:sequence> + </xs:complexType> + + + + <!-- + Der Eichantrag selbst +--> + <xs:element name="DigitalerEichantrag"> + <xs:complexType> + <xs:sequence> + <xs:element name="Dokument"> + <xs:complexType> + <xs:sequence> + <xs:element name="Dateiname" nillable="false"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="100"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> 
+ <xs:element name="Zeitstempel" type="xs:dateTime" nillable="false"/> + <xs:element name="Test" type="xs:boolean" nillable="false"/> + </xs:sequence> + </xs:complexType> + </xs:element> + <xs:element name="Antragsteller"> + <xs:complexType> + <xs:sequence> + <xs:element name="Kuerzel" nillable="false"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="10"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + <xs:element name="Kennung" nillable="false"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="10"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + <xs:element name="EMail" minOccurs="0" maxOccurs="1"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="100"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + <xs:element name="Telefon" minOccurs="0" maxOccurs="1"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="20"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + </xs:sequence> + </xs:complexType> + </xs:element> + <xs:element name="Geraete" minOccurs="1" maxOccurs="1"> + <xs:complexType> + <xs:sequence> + <xs:element maxOccurs="unbounded" name="Geraet" minOccurs="1"> + <xs:complexType> + <xs:sequence> + <xs:element name="SatzID" type="xs:nonNegativeInteger" nillable="false"/> + <xs:element name="Antragsjahr" type="xs:gYear" nillable="false"/> + <xs:element name="Eichfrist" type="xs:gYear" minOccurs="0" maxOccurs="1" /> + <xs:element name="Eichkennzeichen" minOccurs="0" maxOccurs="1"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="50"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + <xs:element name="Bundesland" minOccurs="0" maxOccurs="1"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:minLength value="2"></xs:minLength> + <xs:maxLength value="2"></xs:maxLength> + </xs:restriction> + </xs:simpleType> 
+ </xs:element> + <xs:element name="Dienststelle" minOccurs="0" maxOccurs="1"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="50"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + <xs:element name="Termin" type="xs:dateTime" minOccurs="0" maxOccurs="1" /> + <xs:element name="TerminDauer" type="xs:time" minOccurs="0" maxOccurs="1" /> + <xs:element name="Eichschein" type="xs:boolean" nillable="false"/> + <xs:element name="Bestellnummer" minOccurs="0" maxOccurs="1"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="50"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + <xs:element name="Rechnungszustellung" nillable="false" default="0"> + <xs:simpleType> + <xs:restriction base="xs:nonNegativeInteger"> + <xs:enumeration value ="0" /> + <xs:enumeration value ="1" /> + <xs:enumeration value ="2" /> + <xs:enumeration value ="3" /> + </xs:restriction> + </xs:simpleType> + </xs:element> + <xs:element name="Standort" type="Typ_Kunde" nillable="false" /> + <xs:element name="Rechnungsempfaenger" type="Typ_Kunde" minOccurs="0" maxOccurs="1" /> + <xs:element name="Zustelladresse" type="Typ_Kunde" minOccurs="0" maxOccurs="1" /> + <xs:element name="Geraeteidentifikation" type="Typ_GeraeteIdentifikation" nillable="false" /> + <xs:element name="Geraetebeschreibung" type ="Typ_GeraeteBeschreibung" nillable="false" /> + </xs:sequence> + </xs:complexType> + </xs:element> + </xs:sequence> + </xs:complexType> + </xs:element> + </xs:sequence> + </xs:complexType> + </xs:element> +</xs:schema> + diff --git a/doc/2023-DigitalerEichantrag_Antwort.xsd b/doc/2023-DigitalerEichantrag_Antwort.xsd new file mode 100644 index 0000000000000000000000000000000000000000..f745283dc568a767c0b8a110643b73bbaf017ca5 --- /dev/null +++ b/doc/2023-DigitalerEichantrag_Antwort.xsd @@ -0,0 +1,53 @@ +<?xml version="1.0" encoding="utf-8"?> +<xs:schema attributeFormDefault="unqualified" elementFormDefault="qualified" 
xmlns:xs="http://www.w3.org/2001/XMLSchema"> + +<!-- + Die Antwort auf die einzelnen Eichanträge +--> + <xs:element name="DigitalerEichantrag_Antwort"> + <xs:complexType> + <xs:sequence> + <xs:element name="Dokument"> + <xs:complexType> + <xs:sequence> + <xs:element name="Dateiname" type="xs:string" nillable="false"/> + <xs:element name="Zeitstempel" type="xs:dateTime" nillable="false"/> + </xs:sequence> + </xs:complexType> + </xs:element> + <xs:element name="Geraete" minOccurs="1" maxOccurs="1"> + <xs:complexType> + <xs:sequence> + <xs:element maxOccurs="unbounded" name="Geraet" minOccurs="1"> + <xs:complexType> + <xs:sequence> + <xs:element name="SatzID" type="xs:nonNegativeInteger"/> + <xs:element name="Antragsjahr" type="xs:gYear"/> + <xs:element name="Bestellnummer" minOccurs="0" maxOccurs="1"> + <xs:simpleType> + <xs:restriction base="xs:string"> + <xs:maxLength value="50"></xs:maxLength> + </xs:restriction> + </xs:simpleType> + </xs:element> + <xs:element name="Ergebnis" type="xs:string"/> + <xs:choice> + <xs:sequence> + <xs:element name="Fehlermeldung" type="xs:string"/> + </xs:sequence> + <xs:sequence> + <xs:element name="ZustaendigesBundesland" type="xs:string"/> + <xs:element name="ZustaendigesEichamt" type="xs:string"/> + </xs:sequence> + </xs:choice> + </xs:sequence> + </xs:complexType> + </xs:element> + </xs:sequence> + </xs:complexType> + </xs:element> + </xs:sequence> + </xs:complexType> + </xs:element> +</xs:schema> + diff --git a/doc/example-2021.xml b/doc/example-2021.xml new file mode 100644 index 0000000000000000000000000000000000000000..f5aeeac5e9385d19449bf442d633b66b74b88ae3 --- /dev/null +++ b/doc/example-2021.xml @@ -0,0 +1,86 @@ + + +<?xml version="1.0" encoding="UTF-8"?> +<DigitalerEichantrag> + <Dokument> + <Dateiname>Eichantrag_PTBInstitutBerlinMCNode_20210616_074616200.xml</Dateiname> + <Zeitstempel>2021-06-16T07:46:16.200</Zeitstempel> + <Test>true</Test> + </Dokument> + <Antragsteller> + <Kuerzel>PTB-Test</Kuerzel> + 
<Kennung>PTB-Test</Kennung> + </Antragsteller> + <Geraete> + <Geraet> + <SatzID>1</SatzID> + <Antragsjahr>2021</Antragsjahr> + <Eichschein>false</Eichschein> + <Bestellnummer>20-08-21_111111</Bestellnummer> + <Rechnungszustellung>2</Rechnungszustellung> + <Standort> + <Adresse> + <Name1>PTB-Test GmbH</Name1> + <Name2>Testkunde</Name2> + <Name3>Demonstrator-Raum</Name3> + <Strasse>Salzufer</Strasse> + <Hausnummer>12</Hausnummer> + <Ort>Berlin</Ort> + <Postleitzahl>10587</Postleitzahl> + <ISO_3166>DE</ISO_3166> + </Adresse> + <Kontakt> + <Ansprechpartner>Sekretaritat</Ansprechpartner> + <Telefon>03012344567</Telefon> + <Fax>03012344567</Fax> + <EMail>admin@node1.cloud.mc</EMail> + </Kontakt> + </Standort> + <Rechnungsempfaenger> + <Adresse> + <Name1>PTB-Test Dienstleistung GmbH</Name1> + <ISO_3166>DE</ISO_3166> + <Strasse>Rudolf-Diesel-Str.</Strasse> + <Hausnummer>16-18</Hausnummer> + <Ort>Bad Kreuznach</Ort> + <Postleitzahl>55543</Postleitzahl> + </Adresse> + <Kontakt> + <Ansprechpartner>Buchhaltung</Ansprechpartner> + <Telefon>03012344567</Telefon> + <Fax>03012344567</Fax> + <EMail>info@cloud.mc</EMail> + </Kontakt> + </Rechnungsempfaenger> + <Zustelladresse> + <Adresse> + <Name1>MC Node Org</Name1> + <Strasse>Abbestr.</Strasse> + <Hausnummer>2-12</Hausnummer> + <Ort>Berlin</Ort> + <Postleitzahl>10587</Postleitzahl> + <ISO_3166>DE</ISO_3166> + </Adresse> + <Kontakt> + <Ansprechpartner>Sekretaritat</Ansprechpartner> + <Telefon>030-1234-4567</Telefon> + <Fax>030-1234-4567</Fax> + <EMail>info@cloud.mc</EMail> + </Kontakt> + </Zustelladresse> + <Geraeteidentifikation> + <MessgeraeteArt>12</MessgeraeteArt> + <Hersteller>Super TestScale</Hersteller> + <MessgeraeteTyp>GJL-3232 ST 17</MessgeraeteTyp> + <Identnummer>LGJ-112-QC-12607</Identnummer> + </Geraeteidentifikation> + <Geraetebeschreibung> + <Waage_POS> + <Kenngroesse>15 kg</Kenngroesse> + <Pruefort_Messgeraet>Haus 12, Raum 22</Pruefort_Messgeraet> + </Waage_POS> + </Geraetebeschreibung> + </Geraet> + </Geraete> 
+</DigitalerEichantrag> + diff --git a/doc/mc-2021.xml b/doc/mc-2021.xml new file mode 100644 index 0000000000000000000000000000000000000000..2174770d40e7511a24f1d8b530b399bdded899af --- /dev/null +++ b/doc/mc-2021.xml @@ -0,0 +1,83 @@ +<?xml version="1.0" encoding="UTF-8"?> +<DigitalerEichantrag> + <Dokument> + <Dateiname>Eichantrag_PTBInstitutBerlinMCNode_20210616_074616200.xml</Dateiname> + <Zeitstempel>2021-06-16T07:46:16.200</Zeitstempel> + <Test>true</Test> + </Dokument> + <Antragsteller> + <Kuerzel>PTB-Test</Kuerzel> + <Kennung>PTB-Test</Kennung> + </Antragsteller> + <Geraete> + <Geraet> + <SatzID>1</SatzID> + <Antragsjahr>2021</Antragsjahr> + <Eichschein>false</Eichschein> + <Bestellnummer>20-08-21_111111</Bestellnummer> + <Rechnungszustellung>2</Rechnungszustellung> + <Standort> + <Adresse> + <Name1>PTB</Name1> + <Name2>Node Kunde</Name2> + <Name3>Demonstrator-Raum</Name3> + <Strasse>Salzufer</Strasse> + <Hausnummer>12</Hausnummer> + <Ort>Berlin</Ort> + <Postleitzahl>10587</Postleitzahl> + <ISO_3166>DE</ISO_3166> + </Adresse> + <Kontakt> + <Ansprechpartner>Mr. 
Node</Ansprechpartner> + <Telefon>030-1234-4567</Telefon> + <Fax>030-1234-4567</Fax> + <EMail>admin@node1.cloud.mc</EMail> + </Kontakt> + </Standort> + <Rechnungsempfaenger> + <Adresse> + <Name1>MC Node Org</Name1> + <Strasse>Abbestr.</Strasse> + <Hausnummer>2-12</Hausnummer> + <Ort>Berlin</Ort> + <Postleitzahl>10587</Postleitzahl> + <ISO_3166>DE</ISO_3166> + </Adresse> + <Kontakt> + <Ansprechpartner>Buchhaltung</Ansprechpartner> + <Telefon>030-1234-4567</Telefon> + <Fax>030-1234-4567</Fax> + <EMail>info@cloud.mc</EMail> + </Kontakt> + </Rechnungsempfaenger> + <Zustelladresse> + <Adresse> + <Name1>MC Node Org</Name1> + <Strasse>Abbestr.</Strasse> + <Hausnummer>2-12</Hausnummer> + <Ort>Berlin</Ort> + <Postleitzahl>10587</Postleitzahl> + <ISO_3166>DE</ISO_3166> + </Adresse> + <Kontakt> + <Ansprechpartner>Sekretaritat</Ansprechpartner> + <Telefon>030-1234-4567</Telefon> + <Fax>030-1234-4567</Fax> + <EMail>info@cloud.mc</EMail> + </Kontakt> + </Zustelladresse> + <Geraeteidentifikation> + <MessgeraeteArt>12</MessgeraeteArt> + <Hersteller>MAN1</Hersteller> + <MessgeraeteTyp>Superscale 3001</MessgeraeteTyp> + <Identnummer>MAN1.WA01.00031</Identnummer> + <KundenIdentnummer /> + </Geraeteidentifikation> + <Waage_POS> + <Kenngroesse>15 kg</Kenngroesse> + <PruefortMessgeraet>Unterm Demonstrator-</PruefortMessgeraet> + </Waage_POS> + </Geraet> + </Geraete> +</DigitalerEichantrag> + diff --git a/setup/S00_setup.sh b/setup/S00_setup.sh new file mode 100644 index 0000000000000000000000000000000000000000..7b6058ebe4b856f5e4353735956cb1c7cfcfdb77 --- /dev/null +++ b/setup/S00_setup.sh @@ -0,0 +1,10 @@ +#!/bin/bash + +set -e + +# excute mc specific config tasks +. S09_backend.sh +. S10_db.sh +. 
S16_bli.sh + +echo 'MC-Setup completed ' diff --git a/setup/S09_backend.sh b/setup/S09_backend.sh new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/setup/S10_db.sh b/setup/S10_db.sh new file mode 100644 index 0000000000000000000000000000000000000000..4af6da460287afca2000717fef914455019d1145 --- /dev/null +++ b/setup/S10_db.sh @@ -0,0 +1,8 @@ +#!/bin/bash + +################### +# database # +################### +# set db parameters for db +cat >>/srv/etc/dp-db/db.ini <<EOF +EOF diff --git a/setup/S16_bli.sh b/setup/S16_bli.sh new file mode 100644 index 0000000000000000000000000000000000000000..1f0614eb86858744dc5cd1384d408e778f9a6fbc --- /dev/null +++ b/setup/S16_bli.sh @@ -0,0 +1,9 @@ +#!/bin/bash + +################### +# bli # +################### +echo -e "${HL}configuring bli${NC}" +# set parameters for bli +cat >>/srv/etc/dp-bli/bli.ini <<EOF +EOF diff --git a/sql/01_mc_tables.sql b/sql/01_mc_tables.sql new file mode 100644 index 0000000000000000000000000000000000000000..468f0dbb5c0c2da60e1a36358abd07bacc395b94 --- /dev/null +++ b/sql/01_mc_tables.sql @@ -0,0 +1,68 @@ +DROP TABLE IF EXISTS mc_measuring_instruments CASCADE; +DROP TABLE IF EXISTS mc_product_series CASCADE; +DROP TABLE IF EXISTS mc_mi_device_types CASCADE; +DROP TABLE IF EXISTS mc_series_files CASCADE; +DROP TABLE IF EXISTS mc_device_files CASCADE; +-- MCUIDs:: +-- mcudid = producer . series . device +-- mcusid = producer . 
series + + +CREATE TABLE mc_mi_device_types ( --all Nodes have the same + mid_type_id INTEGER PRIMARY KEY, + m_unit VARCHAR NOT NULL, + m_description VARCHAR +); + +CREATE TABLE mc_product_series ( + mcusid VARCHAR PRIMARY KEY, + name VARCHAR, + manufacturer VARCHAR, + series_image_file_id INTEGER REFERENCES base_files(file_id) ON DELETE SET NULL, --Bild der Baureihe + latest_verified_software_version VARCHAR, + latest_conformity_assessment TIMESTAMPTZ, + mid_type INTEGER REFERENCES mc_mi_device_types(mid_type_id), + -- 2 Waagen-/Zapfsäulenspezifische Werte + quantity VARCHAR, --Kenngröße + conformity_statement VARCHAR --VDMA scales-compa.com link +); + +CREATE TABLE mc_mcusid_files ( + uid VARCHAR REFERENCES mc_product_series(mcusid) ON DELETE CASCADE, + file_id INTEGER REFERENCES base_files(file_id) ON DELETE CASCADE +); + +CREATE TABLE mc_measuring_instruments ( + mcudid VARCHAR PRIMARY KEY, --Identnummer zB fürs Gerät selbst + prod_year INTEGER, --Hertellungsjahr + last_update TIMESTAMPTZ, -- Zeitpunkt letztes Softwareupdate + is_verified BOOLEAN, -- ist gerade geeicht ja/nein? + mi_link VARCHAR, --link zum Messgerät selbst + mi_link_enc VARCHAR, --encoding für die Kommunikation mit dem Messgerät selbst (Json,Xml,Opcua) TODO: @MAD macht vielleicht mehr Sinn bei Series aber ich hab keine Lust ;p + last_calibration TIMESTAMPTZ, --Zeitpunkte letzte Eichung + location INTEGER REFERENCES base_locations(location_id) ON DELETE SET NULL, --Standort + verification_authority VARCHAR, --nodeid?! + owner VARCHAR, --nodeid?! 
+ mcusid VARCHAR REFERENCES mc_product_series(mcusid), --series reference + product_owner INTEGER REFERENCES base_persons(person_id) ON DELETE SET NULL, --Verwender-Produktverantwortlicher + comments VARCHAR, --Bemerkungen + -- 1 Waagen-/Zapfsäulenspezifische Wert + test_location VARCHAR, --Prüfort + -- 2 zapfsäulenspezifische Werte + product VARCHAR, --Produkt (nehme an vom was gezapft wird) + product_name VARCHAR, --Produktname (nehme an vom was gezapft wird) + -- 4 spezifische Werte für Taxameter und Wegstreckenzähler + taxi_plate_number VARCHAR, --Kennzeichen des Taxis + tariff_zone VARCHAR, --Tarifgebiet + status_number VARCHAR, --Ordnungsnummer + propulsion_type VARCHAR --Antriebsart +); + +CREATE TABLE mc_mcudid_files ( + uid VARCHAR REFERENCES mc_measuring_instruments(mcudid) ON DELETE CASCADE, + file_id INTEGER REFERENCES base_files(file_id) ON DELETE CASCADE +); + + +ALTER TABLE mc_measuring_instruments ENABLE ROW LEVEL SECURITY; +ALTER TABLE mc_product_series ENABLE ROW LEVEL SECURITY; diff --git a/sql/02_mc_sequences.sql b/sql/02_mc_sequences.sql new file mode 100644 index 0000000000000000000000000000000000000000..81b1392d136be927d6d5687553ba91bbe3f88963 --- /dev/null +++ b/sql/02_mc_sequences.sql @@ -0,0 +1 @@ +GRANT USAGE,UPDATE ON ALL SEQUENCES IN SCHEMA public TO PUBLIC; diff --git a/sql/03_mc_user_rights.sql b/sql/03_mc_user_rights.sql new file mode 100644 index 0000000000000000000000000000000000000000..2d3604061e21c1c744fd487d69a3acd770d6f7f0 --- /dev/null +++ b/sql/03_mc_user_rights.sql @@ -0,0 +1,29 @@ +DROP POLICY IF EXISTS p_mc_product_series_group0 ON mc_product_series; +DROP POLICY IF EXISTS p_mc_measuring_instruments_group0 ON mc_measuring_instruments; + +-- create policies for group0 (ROW Access) +CREATE POLICY p_mc_product_series_group0 ON mc_product_series FOR ALL TO group0 USING (true); +CREATE POLICY p_mc_measuring_instruments_group0 ON mc_measuring_instruments FOR ALL TO group0 USING (true); + +-- define column Access +GRANT 
SELECT,INSERT,UPDATE,DELETE ON mc_product_series to group0; +GRANT SELECT,INSERT,UPDATE,DELETE ON mc_measuring_instruments TO group0; + +GRANT SELECT,INSERT,UPDATE,DELETE ON mc_mcudid_files to group0; +GRANT SELECT,INSERT,UPDATE,DELETE ON mc_mcusid_files to group0; + +GRANT SELECT ON mc_mi_device_types to group0; + + +-- create policies for group1 (ROW Access) +CREATE POLICY p_mc_product_series_group1 ON mc_product_series FOR ALL TO group1 USING (true); +CREATE POLICY p_mc_measuring_instruments_group1 ON mc_measuring_instruments FOR ALL TO group1 USING (true); + +-- define column access and sequence access +GRANT SELECT,INSERT,UPDATE,DELETE ON mc_product_series to group1; +GRANT SELECT,INSERT,UPDATE,DELETE ON mc_measuring_instruments TO group1; + +GRANT SELECT,INSERT,UPDATE,DELETE ON mc_mcudid_files to group1; +GRANT SELECT,INSERT,UPDATE,DELETE ON mc_mcusid_files to group1; + +GRANT SELECT ON mc_mi_device_types to group1; diff --git a/sql/04_mc_std_data.sql b/sql/04_mc_std_data.sql new file mode 100644 index 0000000000000000000000000000000000000000..8596e545b3642ac5d8582ed3516d80b7b44e0757 --- /dev/null +++ b/sql/04_mc_std_data.sql @@ -0,0 +1,8 @@ + +--fixed for all nodes +INSERT INTO mc_mi_device_types VALUES (0,'-','Nicht-Typisiertes Messgerät'), + (12,'Kilogramm','Waagen'), + (22,'Liter','Zapfsäulen'), + (51,'Kilometer','Taxameter'), + (99, 'Kilogramm', 'Sonstige Messgeräte') + ; diff --git a/sql/FULLSET.sql b/sql/FULLSET.sql new file mode 100644 index 0000000000000000000000000000000000000000..ab01d945112b33771e3300a3428be1d8b522939d --- /dev/null +++ b/sql/FULLSET.sql @@ -0,0 +1,74 @@ + +--local file references +INSERT INTO base_files VALUES (0, 'data/HandbuchPH1.pdf', 'application/pdf', 0, 1216773, ''), + (1, 'data/HandbuchPH2.pdf', 'application/pdf', 0, 1216773, ''), + (2, 'data/HandbuchPH3.pdf', 'application/pdf', 0, 1216773, ''), + (3, 'images/waage1.png', 'image/png', 3, 1068667, ''), + (4, 'images/waage2.png', 'image/png', 3, 1069393, ''), + (5, 
'data/example.xml', 'application/xml', 6, 25419, ''), + (6, 'data/example.pdf', 'application/pdf', 6, 272036, ''), + (7, 'data/waegestueck1.png', 'image/png', 3, 69954, '') + ; + +--local location references +INSERT INTO base_locations VALUES (0, 'Produktionsstraße','911','13370','DE','Hersteller I'), + (1, 'Megamartweg','0815','51800','DE','Beispiel-Supermarkt Filiale 1'), + (2, 'Minimartstraße','1','17534','DE','Beispiel-Supermarkt Filiale 2') + ; + +--local person references +INSERT INTO base_persons VALUES (0, 'Effi Zienz','e.zienz@man1.org','EZ','Hersteller I Admin',0) + ; + +--update sequences ; +SELECT setval('base_locations_location_id_seq', CASE + WHEN (SELECT MAX(location_id) from "base_locations") <1 THEN 1 + ELSE (SELECT MAX(location_id) from "base_locations") + END +); +SELECT setval('base_files_file_id_seq', CASE + WHEN (SELECT MAX(file_id) from "base_files") <1 THEN 1 + ELSE (SELECT MAX(file_id) from "base_files") + END +); +SELECT setval('base_persons_person_id_seq', CASE + WHEN (SELECT MAX(person_id) from "base_persons") <1 THEN 1 + ELSE (SELECT MAX(person_id) from "base_persons") + END +); + +--global references + +-- mcusid, name, manufacturer, series_image_file_id, latest_verified_software_version, latest_conformity_assessment, mid_type, quantity, conformity_statement +INSERT INTO mc_product_series VALUES ('MAN1.WA01','Superscale 3000','MAN1', 3, 'V1.67.2', '2020-01-01 09:00:00', 12, '10 kg', 'https://scales-compa.com/assets/doc/DE-20-PB11-PTB001.pdf'), + ('MAN1.WA02','Superscale 3001','MAN1', 3, 'V0.92.9', NULL, 12, '15 kg', 'https://scales-compa.com/assets/doc/DE-20-PB11-PTB001.pdf'), + ('MAN1.WAX1','Powerwaage XL', 'MAN1', 4, 'V3.2.8', '2019-06-19 16:52:23.7', 12, '20 kg', 'https://scales-compa.com/assets/doc/DE-20-PB11-PTB001.pdf'), + ('NMI1.WS01','Wägestück 10g', NULL, 7, NULL, '1980-12-24 18:42:23.1', 99, '' , 'https://scales-compa.com/assets/doc/DE-20-PB11-PTB001.pdf') + ; + +--join table +INSERT INTO mc_mcusid_files VALUES 
('MAN1.WA01',0), + ('MAN1.WA02',1), + ('MAN1.WAX1',2), + ('MAN1.WA01',3), + ('MAN1.WA02',3), + ('MAN1.WAX1',4), + ('NMI1.WS01',7) + ; + + +--global references +-- mcudid, prod_year, last_update, is_verified, mi_link, mi_link_enc, last_calibration, location, verification_authority, owner, mcusid, product_owner, comments, test_location, product, product_name, taxi_plate_number, tariff_zone, status_number, propulsion_type +INSERT INTO mc_measuring_instruments VALUES ('MAN1.WA01.00031',2017,'2019-04-24 11:24:19.007',TRUE, 'someMIlink', 'Json', NULL, 1, 'MS2', 'USER1', 'MAN1.WA01', 0, 'Eingang links vorne', 'Kasse 1', NULL, NULL, NULL, NULL, NULL, NULL), + ('MAN1.WA01.00042',2018,'2019-04-24 11:27:56.734',TRUE, 'someMIlink', 'Xml', NULL, 1, 'MS2', 'USER1', 'MAN1.WA01', 0, 'Eingang links mitte', 'Kasse 2', NULL, NULL, NULL, NULL, NULL, NULL), + ('MAN1.WA02.00001',2020, NULL, FALSE, NULL, NULL, NULL, 1, NULL, 'USER1', 'MAN1.WA02', 0, 'Eingang vorne rechts (inaktiv)','Kasse 4', NULL, NULL, NULL, NULL, NULL, NULL), + ('MAN1.WAX1.07331',2015,'2018-05-27 15:49:39.71', TRUE, NULL, NULL, NULL, 1, 'MS2', 'USER1', 'MAN1.WAX1', 0, 'Eingang links hinten', 'Kasse 3', NULL, NULL, NULL, NULL, NULL, NULL), + ('MAN1.WA01.00032',2017,'2019-06-23 17:54:49.321',TRUE, NULL, NULL, NULL, 2, 'MS1', 'USER1', 'MAN1.WA01', 0, 'Eingang links vorne', 'Kasse 1', NULL, NULL, NULL, NULL, NULL, NULL), + ('MAN1.WA01.00043',2018,'2019-06-23 17:57:36.123',TRUE, '10.0.0.7', 'Opcua', NULL, 2, 'MS1', 'USER1', 'MAN1.WA01', 0, 'Eingang links hinten', 'Kasse 2', NULL, NULL, NULL, NULL, NULL, NULL), + ('NMI1.WS01.00001',NULL, NULL, NULL, NULL, NULL, NULL, NULL, 'MS1', 'USER1', 'NMI1.WS01', 0, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL) + ; + +--join table +INSERT INTO mc_mcudid_files VALUES ('NMI1.WS01.00001',5), + ('NMI1.WS01.00001',6) + ; diff --git a/sql/MAN1.sql b/sql/MAN1.sql new file mode 100644 index 0000000000000000000000000000000000000000..2e0102a43fed14417c20b0ec75f95482badf8720 --- /dev/null +++ 
b/sql/MAN1.sql @@ -0,0 +1,59 @@ + +--local file references +INSERT INTO base_files VALUES (0, 'data/HandbuchPH1.pdf', 'application/pdf', 0, 1216773, ''), + (1, 'data/HandbuchPH2.pdf', 'application/pdf', 0, 1216773, ''), + (2, 'data/HandbuchPH3.pdf', 'application/pdf', 0, 1216773, ''), + (3, 'images/waage1.png', 'image/png', 3, 1068667, ''), + (4, 'images/waage2.png', 'image/png', 3, 1069393, '') + ; + +--local location references +INSERT INTO base_locations VALUES (0, 'Produktionsstraße','911','13370','DE','Hersteller I') + ; + +--local person references +INSERT INTO base_persons VALUES (0, 'Effi Zienz','e.zienz@man1.org','EZ','Hersteller I Admin',0) + ; + + +--update sequences ; +SELECT setval('base_locations_location_id_seq', CASE + WHEN (SELECT MAX(location_id) from "base_locations") <1 THEN 1 + ELSE (SELECT MAX(location_id) from "base_locations") + END +); +SELECT setval('base_files_file_id_seq', CASE + WHEN (SELECT MAX(file_id) from "base_files") <1 THEN 1 + ELSE (SELECT MAX(file_id) from "base_files") + END +); +SELECT setval('base_persons_person_id_seq', CASE + WHEN (SELECT MAX(person_id) from "base_persons") <1 THEN 1 + ELSE (SELECT MAX(person_id) from "base_persons") + END +); + + +--global references +-- mcusid, name, manufacturer, series_image_file_id, latest_verified_software_version, latest_conformity_assessment, mid_type, quantity, conformity_statement +INSERT INTO mc_product_series VALUES ('MAN1.WA01','Superscale 3000','MAN1', 3, 'V1.67.2', NULL, 12, '10 kg', 'https://scales-compa.com/assets/doc/DE-20-PB11-PTB001.pdf'), + ('MAN1.WA02','Superscale 3001','MAN1', 3, 'V0.92.9', NULL, 12, '15 kg', 'https://scales-compa.com/assets/doc/DE-20-PB11-PTB001.pdf'), + ('MAN1.WAX1','Großwaage XL', 'MAN1', 4, 'V3.2.8', NULL, 12, '20 kg', 'https://scales-compa.com/assets/doc/DE-20-PB11-PTB001.pdf') + ; +--join table +INSERT INTO mc_mcusid_files VALUES ('MAN1.WA01',0), + ('MAN1.WA02',1), + ('MAN1.WAX1',2), + ('MAN1.WA01',3), + ('MAN1.WA02',3), + ('MAN1.WAX1',4) + ; 
+--global references +-- mcudid, prod_year, last_update, is_verified, mi_link, mi_link_enc, last_calibration, location, verification_authority, owner, mcusid, product_owner, comments, test_location, product, product_name, taxi_plate_number, tariff_zone, status_number, propulsion_type +INSERT INTO mc_measuring_instruments VALUES ('MAN1.WA01.00031',2017, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 'TestOrg','MAN1.WA01', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL), + ('MAN1.WA01.00042',2018, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 'TestOrg','MAN1.WA01', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL), + ('MAN1.WA02.00001',2020, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 'TestOrg','MAN1.WA02', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL), + ('MAN1.WAX1.07331',2015, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 'TestOrg','MAN1.WAX1', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL), + ('MAN1.WA01.00032',2017, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 'TestOrg','MAN1.WA01', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL), + ('MAN1.WA01.00043',2018, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 'TestOrg','MAN1.WA01', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL) + ; diff --git a/sql/MAN2.sql b/sql/MAN2.sql new file mode 100644 index 0000000000000000000000000000000000000000..3d094c5cb5d3279b4e0bc72bc66655014ff16eda --- /dev/null +++ b/sql/MAN2.sql @@ -0,0 +1,50 @@ + +--local file references +INSERT INTO base_files VALUES (0, 'data/HandbuchPH1.pdf', 'application/pdf', 0, 1216773, ''), + (1, 'data/HandbuchPH2.pdf', 'application/pdf', 0, 1216773, ''), + (2, 'images/zapfsaeule1.png', 'image/png', 3, 295384, ''), + (3, 'images/taxameter1.png', 'image/png', 3, 839397, '') + ; + +--local location references +INSERT INTO base_locations VALUES (0, 'Wolkenweg','1','00001','HR','Hersteller II') + ; + +--local person references +INSERT INTO base_persons VALUES (0, 'Wilhelm Wichtig','WW@cloud.gov','WW','HSII Hauptadmin',0) + ; +--update sequences ; 
+SELECT setval('base_locations_location_id_seq', CASE + WHEN (SELECT MAX(location_id) from "base_locations") <1 THEN 1 + ELSE (SELECT MAX(location_id) from "base_locations") + END +); +SELECT setval('base_files_file_id_seq', CASE + WHEN (SELECT MAX(file_id) from "base_files") <1 THEN 1 + ELSE (SELECT MAX(file_id) from "base_files") + END +); +SELECT setval('base_persons_person_id_seq', CASE + WHEN (SELECT MAX(person_id) from "base_persons") <1 THEN 1 + ELSE (SELECT MAX(person_id) from "base_persons") + END +); + + +--global references +-- mcusid, name, manufacturer, series_image_file_id, latest_verified_software_version, latest_conformity_assessment, mid_type, quantity, conformity_statement +INSERT INTO mc_product_series VALUES ('MAN2.Z1','Test-Zapfsäule', 'MAN2', 2, 'V2.7.1', NULL, 22, '100 l', 'https://scales-compa.com/assets/doc/DE-20-PB11-PTB001.pdf'), + ('MAN2.SMX','Test-Taxameter','MAN2', 3, 'V1.3.2', NULL, 51, NULL, 'https://scales-compa.com/assets/doc/DE-20-PB11-PTB001.pdf') + ; +--join table +INSERT INTO mc_mcusid_files VALUES ('MAN2.Z1', 0), + ('MAN2.SMX',1), + ('MAN2.Z1', 2), + ('MAN2.SMX',3) + ; +-- mcudid, prod_year, last_update, is_verified, mi_link, mi_link_enc, last_calibration, location, verification_authority, owner, mcusid, product_owner, comments, test_location, product, product_name, taxi_plate_number, tariff_zone, status_number, propulsion_type +INSERT INTO mc_measuring_instruments VALUES ('MAN2.Z1.12345', 2019, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 'TestCorp','MAN2.Z1', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL), + ('MAN2.SMX.00128',2018, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 'TestCorp','MAN2.SMX', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL), + ('MAN2.SMX.00256',2018, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 'TestCorp','MAN2.SMX', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL), + ('MAN2.SMX.00512',2018, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 'TestCorp','MAN2.SMX', NULL, NULL, NULL, NULL, NULL, NULL, 
NULL, NULL, NULL) + ; diff --git a/sql/MS1.sql b/sql/MS1.sql new file mode 100644 index 0000000000000000000000000000000000000000..c8b81c803a42f9b35d34e4dd569f955b45e5204b --- /dev/null +++ b/sql/MS1.sql @@ -0,0 +1,58 @@ + +--local file references +INSERT INTO base_files VALUES (0, 'data/PProtPH1.pdf', 'application/pdf', 9, 1216773, ''), + (1, 'data/PProtPH2.pdf', 'application/pdf', 9, 1216773, ''), + (2, 'data/PProtPH3.pdf', 'application/pdf', 9, 1216773, ''), + (3, 'data/PProtPH4.pdf', 'application/pdf', 9, 1216773, ''), + (4, 'data/PProtPH5.pdf', 'application/pdf', 9, 1216773, ''), + (5, 'data/PProtPH6.pdf', 'application/pdf', 9, 1216773, '') + ; + +--local location references +INSERT INTO base_locations VALUES (0, 'Eichweg','1','55555','DE','Eichbehörde I') + ; + +--local person references +INSERT INTO base_persons VALUES (0, 'Precilla Präzise','pp@eb1.de','PP','Meisterprüferin I',0) + ; +--update sequences +SELECT setval('base_locations_location_id_seq', CASE + WHEN (SELECT MAX(location_id) from "base_locations") <1 THEN 1 + ELSE (SELECT MAX(location_id) from "base_locations") + END +); +SELECT setval('base_files_file_id_seq', CASE + WHEN (SELECT MAX(file_id) from "base_files") <1 THEN 1 + ELSE (SELECT MAX(file_id) from "base_files") + END +); +SELECT setval('base_persons_person_id_seq', CASE + WHEN (SELECT MAX(person_id) from "base_persons") <1 THEN 1 + ELSE (SELECT MAX(person_id) from "base_persons") + END +); + + +--global references +-- mcusid, name, manufacturer, series_image_file_id, latest_verified_software_version, latest_conformity_assessment, mid_type, quantity, conformity_statement +INSERT INTO mc_product_series VALUES ('MAN1.WA01',NULL,NULL,NULL,NULL,NULL,12,NULL,NULL), + ('MAN2.SMX',NULL,NULL,NULL,NULL,NULL,51,NULL,NULL), + ('MAN2.Z1','Tester-Zapfsäule',NULL,NULL,NULL,NULL,22,NULL,NULL) + ; +--global references +-- mcudid, prod_year, last_update, is_verified, mi_link, mi_link_enc, last_calibration, location, verification_authority, owner, mcusid, 
product_owner, comments, test_location, product, product_name, taxi_plate_number, tariff_zone, status_number, propulsion_type +INSERT INTO mc_measuring_instruments VALUES ('MAN1.WA01.00032',NULL,NULL,TRUE,NULL,NULL,'2019-06-25 17:47:18.903',NULL,'MS1','TestOrg','MAN1.WA01',NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL), + ('MAN1.WA01.00043',NULL,NULL,TRUE,NULL,NULL,'2019-06-27 08:12:14.531',NULL,'MS1','TestOrg','MAN1.WA01',NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL), + ('MAN2.Z1.12345',NULL,NULL,TRUE,NULL,NULL,'2019-11-11 14:53:28.8175',NULL,'MS1','TestCorp','MAN2.Z1',NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL), + ('MAN2.SMX.00128',NULL,NULL,TRUE,NULL,NULL,'2019-11-11 15:12:41.1758',NULL,'MS1','TestCorp','MAN2.SMX',NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL), + ('MAN2.SMX.00256',NULL,NULL,TRUE,NULL,NULL,'2019-11-11 15:23:58.7581',NULL,'MS1','TestCorp','MAN2.SMX',NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL), + ('MAN2.SMX.00512',NULL,NULL,TRUE,NULL,NULL,'2019-05-02 09:35:56.5817',NULL,'MS1','TestCorp','MAN2.SMX',NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL) + ; +--join table measurinng_instruments <> files +INSERT INTO mc_mcudid_files VALUES ('MAN1.WA01.00032',0), + ('MAN1.WA01.00043',1), + ('MAN2.Z1.12345',2), + ('MAN2.SMX.00128',3), + ('MAN2.SMX.00256',4), + ('MAN2.SMX.00512',5) + ; diff --git a/sql/MS2.sql b/sql/MS2.sql new file mode 100644 index 0000000000000000000000000000000000000000..9d3e55e80d46b86079723d7ba3529d346ad5583c --- /dev/null +++ b/sql/MS2.sql @@ -0,0 +1,50 @@ + +--local file references +INSERT INTO base_files VALUES (0, 'data/PProtPH1.pdf', 'application/pdf', 9, 1216773, ''), + (1, 'data/PProtPH2.pdf', 'application/pdf', 9, 1216773, ''), + (2, 'data/PProtPH3.pdf', 'application/pdf', 9, 1216773, ''), + (3, 'data/PProtPH4.pdf', 'application/pdf', 9, 1216773, '') + ; + +--local location references +INSERT INTO base_locations VALUES (0, 'Eichweg','2','66666','DE','TestEichbehörde II') + ; + +--local person references +INSERT INTO base_persons 
VALUES (0, 'Dr. Gründlich','dr.gruen@eb2.de','DRG','Meisterprüfer II',0) + ; +--update sequences +SELECT setval('base_locations_location_id_seq', CASE + WHEN (SELECT MAX(location_id) from "base_locations") <1 THEN 1 + ELSE (SELECT MAX(location_id) from "base_locations") + END +); +SELECT setval('base_files_file_id_seq', CASE + WHEN (SELECT MAX(file_id) from "base_files") <1 THEN 1 + ELSE (SELECT MAX(file_id) from "base_files") + END +); +SELECT setval('base_persons_person_id_seq', CASE + WHEN (SELECT MAX(person_id) from "base_persons") <1 THEN 1 + ELSE (SELECT MAX(person_id) from "base_persons") + END +); + + +--global references +INSERT INTO mc_product_series VALUES ('MAN1.WA01',NULL,NULL,NULL,NULL,NULL,12,NULL,NULL), + ('MAN1.WA02',NULL,NULL,NULL,NULL,NULL,12,NULL,NULL), + ('MAN1.WAX1','Großwaage XXL',NULL,NULL,NULL,NULL,12,NULL,NULL) + ; +--global references +INSERT INTO mc_measuring_instruments VALUES ('MAN1.WA01.00031',NULL,NULL,TRUE,NULL,NULL,'2019-05-02 09:17:28.817',NULL,'MS2','TestOrg','MAN1.WA01',NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL), + ('MAN1.WA01.00042',NULL,NULL,TRUE,NULL,NULL,'2019-05-02 09:26:41.178',NULL,'MS2','TestOrg','MAN1.WA01',NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL), + ('MAN1.WA02.00001',NULL,NULL,TRUE,NULL,NULL,NULL,NULL,'MS2','TestOrg','MAN1.WA02',NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL), + ('MAN1.WAX1.07331',NULL,NULL,TRUE,NULL,NULL,'2019-05-02 09:35:56.781',NULL,'MS2','TestOrg','MAN1.WAX1',NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL) + ; +--join table measurinng_instruments <> files +INSERT INTO mc_mcudid_files VALUES ('MAN1.WA01.00031',0), + ('MAN1.WA01.00042',1), + ('MAN1.WA02.00001',2), + ('MAN1.WAX1.07331',3) + ; diff --git a/sql/NMI1.sql b/sql/NMI1.sql new file mode 100644 index 0000000000000000000000000000000000000000..92e5dcaf0baa0c8f1410f2497bdede36862ddf62 --- /dev/null +++ b/sql/NMI1.sql @@ -0,0 +1,56 @@ + +--local file references +INSERT INTO base_files VALUES (0, 'data/CAPH1.pdf', 'application/pdf', 8, 
1216773, ''), + (1, 'data/CAPH2a.pdf', 'application/pdf', 8, 1216773, ''), + (2, 'data/CAPH2b.pdf', 'application/pdf', 8, 1216773, ''), + (3, 'data/CAPH3.pdf', 'application/pdf', 8, 1216773, ''), + (4, 'data/CAPH4.pdf', 'application/pdf', 8, 1216773, ''), + (5, 'data/example.xml', 'application/xml', 6, 25419, ''), + (6, 'data/example.pdf', 'application/pdf', 6, 272036, ''), + (7, 'data/waegestueck1.png', 'image/png', 3, 69954, '') + ; + +--local location references +INSERT INTO base_locations VALUES (0, 'Abbestraße','2-12','10587','DE','PTB') + ; + +--local person references +INSERT INTO base_persons VALUES (0, 'Victor Vorsicht','vv@ptb.de','VV','PTB Admin00',0) + ; +--update sequences +SELECT setval('base_locations_location_id_seq', CASE + WHEN (SELECT MAX(location_id) from "base_locations") <1 THEN 1 + ELSE (SELECT MAX(location_id) from "base_locations") + END +); +SELECT setval('base_files_file_id_seq', CASE + WHEN (SELECT MAX(file_id) from "base_files") <1 THEN 1 + ELSE (SELECT MAX(file_id) from "base_files") + END +); +SELECT setval('base_persons_person_id_seq', CASE + WHEN (SELECT MAX(person_id) from "base_persons") <1 THEN 1 + ELSE (SELECT MAX(person_id) from "base_persons") + END +); + +--global references +INSERT INTO mc_product_series VALUES ('MAN1.WA01','Superscale 3000',NULL,NULL,NULL,'2020-01-01 09:00:00.0',12,NULL), + ('MAN1.WA02','Superscale 3001',NULL,NULL,NULL,NULL,12,NULL), + ('MAN2.Z1','Test-Zapfsäule XL',NULL,NULL,NULL,'2019-04-01 11:11:11.111',22,NULL), + ('MAN1.WAX1','Großwaage XXXL',NULL,NULL,NULL,'2019-06-19 16:52:23.7',12,NULL), + ('MAN2.SMX','Test-Taxameter',NULL,NULL,NULL,'2019-12-24 18:42:23.1',51,NULL), + ('NMI1.WS01','Wägestück 10g',NULL,7,NULL,'1980-12-24 18:42:23.1',99,NULL) + ; +INSERT INTO mc_measuring_instruments VALUES ('NMI1.WS01.00001',NULL,NULL,NULL,NULL,NULL,NULL,NULL,'MS1','TestOrg','NMI1.WS01',NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL) + ; +--join table +INSERT INTO mc_mcusid_files VALUES ('MAN1.WA01',0), + 
('MAN2.Z1',3), + ('MAN1.WAX1',1), + ('MAN1.WAX1',2), + ('MAN2.SMX',4), + ('NMI1.WS01',7) + ; +INSERT INTO mc_mcudid_files VALUES ('NMI1.WS01.00001',5), + ('NMI1.WS01.00001',6); diff --git a/sql/README.txt b/sql/README.txt new file mode 100644 index 0000000000000000000000000000000000000000..cca008b1f0210b395643a34d9c9b154c54d04a61 --- /dev/null +++ b/sql/README.txt @@ -0,0 +1,46 @@ +### Aim ### +This initialization scheme can be adjusted by changing the content of the three +files contained in it. At the same time it circumscribes a logic for the +access-rights management on the database and the structure of the Postgres +server. + +### Files in this folder ### + -- pg_ini.json +This is the origin for the setup. The setup function in +service-lib::db_utils::hello_postgres needs the path to this file. It contains +paths to the other files as well as a schema title, the names of the database, +its administrator, its users (roles), and a list of empty policies to be created +(see comments below). + -- mcoat_tables.sql +This SQL script contains CREATE TABLE commands to set up the tables and fill +them with content. + -- mcoat_privileges.sql +Contains ALTER POLICY commands that set specific row access-rights for specific +users. + +### Process logic for the setup ### +1) A connection to the 'postgres' database as the 'postgres' user is established. +2) It is checked whether the roles or database specified in pg_ini.json already + exist. If yes, they are erased off the Postgres server. +3) Using the 'postgres' user the database and users are created. Then the + connection is closed. +4) Now a connection to the actual database (i.e. mcoat) is established as the + admin user. +5) The script mcoat_tables.sql is run to fill the database with content. +6) The function db_utils::hello_postgres::set_default is run to create empty + row-access policies (as specified in pg_ini.json). (see comments below). +7) The script mcoat_privileges.sql is run to fill the empty row policies. 
+ +### Comment on access-rights management (POLICIES and PRIVILEGES) ### +See the wiki entry on the db service. + +### Why not everything in one large SQL script? ### +1) The database and admin has to be created before you can connect to them and + execute a script. +2) When resetting, one has to check for already existing entities (users, + database) on the Postgres server. This is much easier if a list of users and + the database name are contained in a deserializable file (like pg_ini.json). +3) The design choice for the access rights management is to have one POLICY for + almost every (user, table) pair. Creating all these policies in an SQL script + is tedious (SQL is not the most elegant language) and more importantly, the + script would not have access to the list of users in pg_ini.json. diff --git a/sql/USER1.sql b/sql/USER1.sql new file mode 100644 index 0000000000000000000000000000000000000000..3274c331bc7734fa85bc0bc9d87cc2465509000a --- /dev/null +++ b/sql/USER1.sql @@ -0,0 +1,51 @@ + +--local file references +INSERT INTO base_files VALUES (0, 'images/PH1.pdf', 'application/pdf', 3, 1216773, ''), + (1, 'images/PH2.pdf', 'application/pdf', 3, 1216773, ''), + (2, 'images/PH3.pdf', 'application/pdf', 3, 1216773, '') + ; + +--local location references +INSERT INTO base_locations VALUES (0, 'Megamartweg','0815','51800','DE','Beispiel-Supermarkt Filiale 1'), + (1, 'Minimartstraße','1','17534','DE','Beispiel-Supermarkt Filiale 2') + ; + +--local person references +INSERT INTO base_persons VALUES (0, 'Dr. 
No','no@google.com','NO','TestOrg',0) + ; +--update sequences +SELECT setval('base_locations_location_id_seq', CASE + WHEN (SELECT MAX(location_id) from "base_locations") <1 THEN 1 + ELSE (SELECT MAX(location_id) from "base_locations") + END +); +SELECT setval('base_files_file_id_seq', CASE + WHEN (SELECT MAX(file_id) from "base_files") <1 THEN 1 + ELSE (SELECT MAX(file_id) from "base_files") + END +); +SELECT setval('base_persons_person_id_seq', CASE + WHEN (SELECT MAX(person_id) from "base_persons") <1 THEN 1 + ELSE (SELECT MAX(person_id) from "base_persons") + END +); + + +--global references +INSERT INTO mc_product_series VALUES ('MAN1.WA01',NULL,'MAN1',NULL,'V1.67.2',NULL,12,NULL,NULL), + ('MAN1.WA02',NULL,'MAN1',NULL,'V1.02.9',NULL,12,NULL,NULL), + ('MAN1.WAX1','Großwaage XXXXL','MAN1',NULL,'V3.2.8',NULL,12,NULL,NULL) + ; +--join table +INSERT INTO mc_mcusid_files VALUES ('MAN1.WA01',0), + ('MAN1.WA02',1), + ('MAN1.WAX1',2) + ; + +INSERT INTO mc_measuring_instruments VALUES ('MAN1.WA01.00031',2017,'2019-04-24 11:24:19.007',NULL,'someMIlink','Json',NULL,0,'MS2','TestOrg','MAN1.WA01',0,'Eingang links vorne','Kasse 1',NULL,NULL,NULL,NULL,NULL,NULL), + ('MAN1.WA01.00042',2018,'2019-04-24 11:27:56.734',NULL,'someMIlink','Xml',NULL,0,'MS2','TestOrg','MAN1.WA01',0,'Eingang links mitte','Kasse 2',NULL,NULL,NULL,NULL,NULL,NULL), + ('MAN1.WA02.00001',2020,NULL,NULL,NULL,NULL,NULL,0,NULL,'TestOrg','MAN1.WA02',0,'Eingang vorne rechts (inaktiv)','Kasse 4',NULL,NULL,NULL,NULL,NULL,NULL), + ('MAN1.WAX1.07331',2015,'2018-05-27 15:49:39.71',NULL,NULL,NULL,NULL,0,'MS2','TestOrg','MAN1.WAX1',0,'Eingang links hinten','Kasse 3',NULL,NULL,NULL,NULL,NULL,NULL), + ('MAN1.WA01.00032',2017,'2019-06-23 17:54:49.321',NULL,NULL,NULL,NULL,1,'MS1','TestOrg','MAN1.WA01',0,'Eingang links vorne','Kasse 1',NULL,NULL,NULL,NULL,NULL,NULL), + ('MAN1.WA01.00043',2018,'2019-06-23 17:57:36.123',NULL,'10.0.0.7','Opcua',NULL,1,'MS1','TestOrg','MAN1.WA01',0,'Eingang links hinten','Kasse 
2',NULL,NULL,NULL,NULL,NULL,NULL) + ; diff --git a/sql/USER2.sql b/sql/USER2.sql new file mode 100644 index 0000000000000000000000000000000000000000..4a7b324e971e31b35d185a38370c27ae7af355d3 --- /dev/null +++ b/sql/USER2.sql @@ -0,0 +1,45 @@ + +--local file references +INSERT INTO base_files VALUES (0, 'images/PH1.pdf', 'application/pdf', 3, 1216773, ''), + (1, 'images/PH2.pdf', 'application/pdf', 3, 1216773, '') + ; + +--local location references +INSERT INTO base_locations VALUES (0, 'Messstraße','123','12345','DE','TestStadt') + ; + +--local person references +INSERT INTO base_persons VALUES (0, 'Anton Anspruchsvoll','aa@bv.org','AA','TestCorp',0) + ; +--update sequences +SELECT setval('base_locations_location_id_seq', CASE + WHEN (SELECT MAX(location_id) from "base_locations") <1 THEN 1 + ELSE (SELECT MAX(location_id) from "base_locations") + END +); +SELECT setval('base_files_file_id_seq', CASE + WHEN (SELECT MAX(file_id) from "base_files") <1 THEN 1 + ELSE (SELECT MAX(file_id) from "base_files") + END +); +SELECT setval('base_persons_person_id_seq', CASE + WHEN (SELECT MAX(person_id) from "base_persons") <1 THEN 1 + ELSE (SELECT MAX(person_id) from "base_persons") + END +); + + +--global references +INSERT INTO mc_product_series VALUES ('MAN2.Z1','Zapfsäulen-Testname','MAN2',NULL,NULL,NULL,22,NULL,NULL), + ('MAN2.SMX',NULL,'MAN2',NULL,NULL,NULL,51,NULL,NULL) + ; +--join table +INSERT INTO mc_mcusid_files VALUES ('MAN2.Z1',0), + ('MAN2.SMX',1) + ; + +INSERT INTO mc_measuring_instruments VALUES ('MAN2.Z1.12345',2019,'2020-02-12 11:24:19.007',NULL,NULL,NULL,NULL,0,'MS1','TestCorp','MAN2.Z1',0,'nicht öffentlich','Hinter Garage links','Benzin','Super104',NULL,NULL,NULL,NULL), + ('MAN2.SMX.00128',2018,'2019-11-11 11:27:26.347',NULL,NULL,NULL,NULL,0,'MS1','TestCorp','MAN2.SMX',0,'Wagen 1',NULL,NULL,NULL,'B-W1-1337','Großraum Berlin','1337331','Elektroantrieb'), + ('MAN2.SMX.00256',2018,'2019-11-11 
11:29:36.473',NULL,NULL,NULL,NULL,0,'MS1','TestCorp','MAN2.SMX',0,'Wagen 2',NULL,NULL,NULL,'B-W2-1024','Großraum Berlin','7654321','Elektroantrieb'), + ('MAN2.SMX.00512',2018,'2019-11-11 11:31:46.734',NULL,NULL,NULL,NULL,0,'MS1','TestCorp','MAN2.SMX',0,'Wagen 3',NULL,NULL,NULL,'B-W3-0110','Großraum Berlin','1234567','Allradantrieb') + ; diff --git a/sql/sql_init.sh b/sql/sql_init.sh new file mode 100644 index 0000000000000000000000000000000000000000..5712fbe089efcbb7b9337317dc025ff711cccbd4 --- /dev/null +++ b/sql/sql_init.sh @@ -0,0 +1,26 @@ +#!/bin/bash +db="base_db" +# echo -e "${HL}initializing mc database${NC}" +# psql ${PSQL_OPTS} < 00_mc_database.sql >&${output_target} + +# init db schema +echo -e "${HL}installing sql-tables${NC}" +psql ${PSQL_OPTS} -d ${db} <01_mc_tables.sql >&${output_target} + +# update sequences +echo -e "${HL}updating sql-sequences${NC}" +psql ${PSQL_OPTS} -d ${db} <02_mc_sequences.sql >&${output_target} + +#init db users +echo -e "${HL}installing sql-users${NC}" +psql ${PSQL_OPTS} -d ${db} <03_mc_user_rights.sql >&${output_target} + +# init db schema +echo -e "${HL}inserting std data${NC}" +psql ${PSQL_OPTS} -d ${db} <04_mc_std_data.sql >&${output_target} + +# #init db default data +# if [ -f /${db_data_dump} ]; then +# echo -e "${HL}inserting default data${NC}" +# psql ${PSQL_OPTS} -d ${db} < data.sql >&${output_target} +# fi diff --git a/src/backend/init.rs b/src/backend/init.rs new file mode 100644 index 0000000000000000000000000000000000000000..88ef948b35a2b51af65d7bf31536bea995c1e099 --- /dev/null +++ b/src/backend/init.rs @@ -0,0 +1,89 @@ +use backend_lib::init::BackendConfig; +use backend_lib::smarty_contracts_cookbook::ScCookbook; +use dpsfw::dp_utils::cache::{connect_local_db, SQLiteTable}; +pub use dpsfw::init::Initing; +use dpsfw_types::backend::index::UniversalIndexEntry; +use dpsfw_types::config::GlobalConfigured; +use dpsfw_types::error::PE; +use utils::cidinfo; +use utils::ini::{ini2string, ini2var, Ini}; + +pub use 
async_trait::async_trait; +use lazy_static::lazy_static; +use once_cell::sync::OnceCell; +use std::marker::PhantomData as PD; +use url::Url; + +// indexes +lazy_static! { + pub static ref MI_INDEX_TBL_NAME: String = format!("{}_index", crate::mc::uids::MCUD_ID); + pub static ref SERIES_INDEX_TBL_NAME: String = format!("{}_index", crate::mc::uids::MCUS_ID); +} + +/// global config for Backend +#[derive(Debug, Clone)] +pub struct McBackendConfig { + /// hardcoded DEMOL host when applying for reverification + pub demol_host: Url, //demol.berlin.ptb.de + /// key for connection to sym2gw + pub sym2_host: Option<Url>, // server xyz + /// key for connection to sym2gw + pub sym2_key: Option<String>, // /srv/etc/mc-backend/sym2.key + /// cert for connection to sym2gw + pub sym2_cert: Option<String>, // /srv/etc/mc-backend/sym2.crt + /// peer_cert for connection to sym2gw + pub sym2_peer_cert: Option<String>, // /srv/etc/mc-backend/sym2_peer.crt +} + +/// the global config for everything :D +static GLOBAL_MC_BACKEND_CONFIG: OnceCell<McBackendConfig> = OnceCell::new(); + +impl GlobalConfigured for McBackendConfig { + fn get_cell() -> &'static OnceCell<Self> { + &GLOBAL_MC_BACKEND_CONFIG + } +} + +/// backend init functions +pub struct McBackendInit {} +#[async_trait] +impl Initing for McBackendInit { + async fn init(self, ini: &Ini) -> Result<(), PE> { + let mc_backend = McBackendConfig { + sym2_host: ini2var("specific", "sym2_host", ini).ok(), + sym2_key: ini2string("specific", "sym2_key", ini).ok(), + sym2_cert: ini2string("specific", "sym2_cert", ini).ok(), + sym2_peer_cert: ini2string("specific", "sym2_peer_cert", ini).ok(), + demol_host: ini2var("specific", "demol_host", ini).unwrap(), + }; + mc_backend.init(); + + let conn = connect_local_db(&BackendConfig::get_global().backend_cache)?; + + cidinfo!("Initing with MC-Schema {}", crate::MC_SCHEMA_VERSION); + + let mi_index_tbl = + SQLiteTable::new_from_struct(&MI_INDEX_TBL_NAME, "", PD::<UniversalIndexEntry>); + if 
!mi_index_tbl.table_exists(&conn) { + mi_index_tbl.generate(&conn, Vec::new())?; + }; + let series_index_tbl = + SQLiteTable::new_from_struct(&SERIES_INDEX_TBL_NAME, "", PD::<UniversalIndexEntry>); + if !series_index_tbl.table_exists(&conn) { + series_index_tbl.generate(&conn, Vec::new())?; + }; + + Ok(()) + } +} + +impl McBackendInit { + /// get emc index tables for guids that will be use in message distribution + pub fn get_index_tbls() -> Vec<&'static String> { + vec![&MI_INDEX_TBL_NAME, &SERIES_INDEX_TBL_NAME] + } + /// get all emc processes + pub fn extend_sc_cookbook(book: &mut ScCookbook) { + book.extend(crate::backend::smarties::SC_COOKBOOK.clone()) + } +} diff --git a/src/backend/methods/mid.rs b/src/backend/methods/mid.rs new file mode 100644 index 0000000000000000000000000000000000000000..3bb82aa239e746702951e9bc0b41595e154e88b6 --- /dev/null +++ b/src/backend/methods/mid.rs @@ -0,0 +1,332 @@ +/*!************************************************************************************************** +Contains all ARM methods known to the backend. 
+A method is implemented with the RawRequest, an input struct, an output struct and an error struct, +returns a StatusCode and an Option<PE>, +non fatal errors are pushed to error-vector and +fatal errors are returned +**************************************************************************************************!*/ + +use async_trait::async_trait; +use std::collections::HashMap; +use std::marker::PhantomData as PD; + +use crate::backend::init::MI_INDEX_TBL_NAME; +use crate::backend::utils::rest_http::get_version; +use crate::mc::uids::{MCUDID, MCUSID}; +use backend_lib::backend_utils::index::get_uids_from_uid_index_table; +use backend_lib::init::BackendConfig; +use backend_lib::smarty_contracts_cookbook::utils::{ + get_available_steps_for_id, get_proc_def, get_state_def, +}; +use dpsfw::api_method::{Processing, ProcessingReply, RawRequest, SelfDeclaration}; +use dpsfw::dp_utils::cache::{connect_local_db, SQLiteTable}; +use dpsfw::dp_utils::clienting::{perform_client_request, ServiceDestination}; +use dpsfw::dp_utils::crypto::get_keyid_from_pem_pubkey; +use dpsfw::http::StatusCode; +use dpsfw::meta_processors::parse_token; +use dpsfw::processing_meta_store::ProcessingMetaStore; +use dpsfw::self_declare; +use dpsfw_types::bli::io_types::AssocProc; +use dpsfw_types::config::GlobalConfigured; +use dpsfw_types::ctx; +use dpsfw_types::dp::services::NodeService; +use dpsfw_types::error::PE; +use dpsfw_types::merge::MergeLeaf; +use dpsfw_types::EmptyIOData; +use utils::{asynctor, mentor}; + +self_declare!(GetMidAllMethod, comments:r#" +try to get all accessable mcudids +"#); +#[async_trait] +impl Processing for GetMidAllMethod { + type Input = EmptyIOData; + type Output = Vec<MCUDID>; + async fn execute_call( + &self, + request: &RawRequest, + _input: &Self::Input, + output: &mut Self::Output, + errors: &mut Vec<PE>, + _: &mut ProcessingMetaStore, + ) -> ProcessingReply { + let _ = asynctor!(cid, perform_client_request; request, 
&ServiceDestination::Internal(NodeService::DB, None), output, errors).map_err(|e| errors.push(e.warn(ctx!()))); + + let mut other_uids = mentor!(cid, get_uids_from_uid_index_table;&MI_INDEX_TBL_NAME)?; + + output.append(&mut other_uids); + output.sort_unstable(); + output.dedup(); + + Ok(StatusCode::OK) + } +} + +self_declare!(GetAllBySeries, comments:r#" +try to get all accessable mcudids associated to this series +"#); +#[async_trait] +impl Processing for GetAllBySeries { + type Input = MCUSID; + type Output = Vec<MCUDID>; + async fn execute_call( + &self, + request: &RawRequest, + input: &Self::Input, + output: &mut Self::Output, + errors: &mut Vec<PE>, + _: &mut ProcessingMetaStore, + ) -> ProcessingReply { + let _ = asynctor!(cid, perform_client_request; request, &ServiceDestination::Internal(NodeService::DB, None), output, errors).map_err(|e| errors.push(e.warn(ctx!()))); + + let mut other_uids = mentor!(cid, get_uids_from_uid_index_table;&MI_INDEX_TBL_NAME)? + .into_iter() + .filter(|mcudid: &MCUDID| { + mcudid.series_id == input.series_id && mcudid.node_id == input.node_id + }) + .collect(); + + output.append(&mut other_uids); + output.sort_unstable(); + output.dedup(); + + Ok(StatusCode::OK) + } +} + +use crate::backend::types::io_types::{DigRep, GetMiDataOutput}; +use crate::db::types::io_types::GetMiDataOutput as DBGetMiDataOutput; +use dpsfw_types::bli::io_types::OurProc; +self_declare!(GetMidDataMethod, comments:r#" +try to get all accessable data for given mcudids +"#); +#[async_trait] +impl Processing for GetMidDataMethod { + type Input = Vec<MCUDID>; + #[rustfmt::skip] + type Output = GetMiDataOutput; + async fn execute_call( + &self, + request: &RawRequest, + input: &Self::Input, + output: &mut Self::Output, + errors: &mut Vec<PE>, + pms: &mut ProcessingMetaStore, + ) -> ProcessingReply { + let result = GetMidDataVpnMethod {} + .execute_call(request, input, output, errors, pms) + .await?; + + output.sort_unstable_by_key(|mi| 
mi.data.mcudid.clone()); + // read our_notifications table + let conn = connect_local_db(&BackendConfig::get_global().backend_cache).unwrap(); + let procs_tbl = SQLiteTable::new_from_struct("our_procs", "proc_id", PD::<OurProc>); + let procs = procs_tbl.select(&conn, None)?; + + let token = parse_token(&request.token)?; + let issuer_id = get_keyid_from_pem_pubkey(&token.0.sub)?; + + let mut assoc_procs: HashMap<MCUDID, Vec<MergeLeaf<AssocProc>>> = HashMap::new(); + for note in procs { + // get proc_type_info + let proc = match get_proc_def(¬e.proc_type_id) { + Ok(proc) => proc, + Err(e) => { + errors.push(e); + continue; + } + }; + // get the current state definition + let state = match get_state_def(¬e.proc_type_id, ¬e.state) { + Ok(state) => state, + Err(e) => { + errors.push(e); + continue; + } + }; + let avail_steps = asynctor!(cid, get_available_steps_for_id; ¬e.proc_id, &issuer_id) + .map_err(|e| e.id_warn(ctx!())) + .unwrap_or_default(); + let new_note = AssocProc { + proc_id: note.proc_id, + state: note.state, + last_update: note.last_update, + state_label: state.name.to_string(), + proc_type_id: note.proc_type_id, + proc_type_label: proc.name.to_string(), + avail_steps, + }; + for uid in note.guids.0 { + let id = match MCUDID::try_from(uid) { + Ok(id) => id, + Err(e) => { + e.id_warn(ctx!()); + continue; + } + }; + match assoc_procs.get_mut(&id) { + None => { + assoc_procs + .insert(id, vec![MergeLeaf::with_current_node(new_note.clone())]); + } + Some(procs) => { + procs.push(MergeLeaf::with_current_node(new_note.clone())); + } + } + } + } + + // join process data to DigRep data + for dataset in output { + // if let Some(ref mcudid) = dataset.data.mcudid { + if let Some(notes) = assoc_procs.remove(&dataset.data.mcudid) { + dataset.proc_data = notes; + }; + // }; + } + Ok(result) + } +} + +use crate::backend::methods::opcua::MeasurementInstrumentMethod; +use crate::backend::types::opcua::MeasurementInstrumentMethodInput; +use 
crate::backend::types::opcua::MeasurementInstrumentMethodOutput; +use crate::backend::types::opcua::MethodKind; +self_declare!(GetMidDataVpnMethod, comments:r#" +try to get all data from DB and MI for given mcudids +"#); +#[async_trait] +impl Processing for GetMidDataVpnMethod { + type Input = Vec<MCUDID>; + #[rustfmt::skip] + type Output = GetMiDataOutput; + async fn execute_call( + &self, + request: &RawRequest, + _input: &Self::Input, + output: &mut Self::Output, + errors: &mut Vec<PE>, + pms: &mut ProcessingMetaStore, + ) -> ProcessingReply { + let mut fw_output = DBGetMiDataOutput::default(); + let result = asynctor!(cid, perform_client_request; request, &ServiceDestination::Internal(NodeService::DB, None), &mut fw_output, errors).map_err(|e| e.id_debug(ctx!()))?; + for dataset in fw_output { + let mcudid = dataset.data.mcudid.clone(); + let mut final_dataset = DigRep { + mcudid: mcudid.clone(), + data: dataset.data, + persons_data: dataset.persons_data, + locations_data: dataset.locations_data, + device_files: dataset.device_files, + series_data: dataset.series_data, + series_files: dataset.series_files, + device_type_data: dataset.device_type_data, + proc_data: Vec::new(), + installed_software_version: MergeLeaf::default(), + }; + + // only proceed with mi_link defined + // TODO: what if there is dissens? 
+ if let Some(link) = final_dataset.data.mi_link.get_first_val() { + match link.0.scheme() { + "http" | "https" => { + // only proceed with encoding defined + match final_dataset.data.mi_link_enc.get_first_val() { + None => (), + Some(enc) => match get_version(enc, link, errors).await { + Ok(status) => final_dataset + .installed_software_version + .add_with_current_node(status), + Err(e) => errors.push(e), + }, + }; + } + "opc.tcp" => { + let mut out = MeasurementInstrumentMethodOutput::default(); + let input = MeasurementInstrumentMethodInput { + mcudid, + method_kind: MethodKind::GetVersion, + input_argument: None, + }; + + let method = MeasurementInstrumentMethod {}; + // this seems to be very hacky reusing the wrong RawRequest + match asynctor!(cid, method.execute_call; request, &input, &mut out, errors, pms) + { + Ok(_status) => { + final_dataset + .installed_software_version + .add_with_current_node(out.result); + } + Err(error) => errors.push(error), + }; + } + _ => (), + } + }; + output.push(final_dataset); + } + Ok(result) + } +} + +use crate::db::types::io_types::MiDataIO; +self_declare!(UpdateMidDataMethod, comments:r#" +update mi data for given mcudid, fields that are null or omitted won't be updated +"#); +#[async_trait] +impl Processing for UpdateMidDataMethod { + type Input = MiDataIO; + type Output = EmptyIOData; + async fn execute_call( + &self, + request: &RawRequest, + _input: &Self::Input, + output: &mut Self::Output, + errors: &mut Vec<PE>, + _: &mut ProcessingMetaStore, + ) -> ProcessingReply { + let result = asynctor!(cid, perform_client_request; request, &ServiceDestination::Internal(NodeService::DB, None), output, errors); + result.map_err(|e| e.id_debug(ctx!())) + } +} + +self_declare!(InsertMidDataMethod, comments:r#" +insert new mi data, fields that are omitted will be set as NULL in postgres +"#); +#[async_trait] +impl Processing for InsertMidDataMethod { + type Input = Vec<MiDataIO>; + type Output = EmptyIOData; + async fn 
execute_call( + &self, + request: &RawRequest, + _input: &Self::Input, + output: &mut Self::Output, + errors: &mut Vec<PE>, + _: &mut ProcessingMetaStore, + ) -> ProcessingReply { + let result = asynctor!(cid, perform_client_request; request, &ServiceDestination::Internal(NodeService::DB, None), output, errors); + result.map_err(|e| e.id_debug(ctx!())) + } +} + +self_declare!(DeleteMidDataMethod, comments:r#" +delete mi data for given mcudid, will error if there are ForeignKeys to this mcudid +"#); +#[async_trait] +impl Processing for DeleteMidDataMethod { + type Input = MCUDID; + type Output = EmptyIOData; + async fn execute_call( + &self, + request: &RawRequest, + _input: &Self::Input, + output: &mut Self::Output, + errors: &mut Vec<PE>, + _: &mut ProcessingMetaStore, + ) -> ProcessingReply { + let result = asynctor!(cid, perform_client_request; request, &ServiceDestination::Internal(NodeService::DB, None), output, errors); + result.map_err(|e| e.id_debug(ctx!())) + } +} diff --git a/src/backend/methods/mod.rs b/src/backend/methods/mod.rs new file mode 100644 index 0000000000000000000000000000000000000000..61197442257087a52699e79bba077623ad7d87f7 --- /dev/null +++ b/src/backend/methods/mod.rs @@ -0,0 +1,9 @@ +// all the opcua functions +pub mod opcua; + +// all the DR query functions functions +pub mod mid; +pub mod series; + +// sym2 trealted queries +pub mod sym2; diff --git a/src/backend/methods/opcua.rs b/src/backend/methods/opcua.rs new file mode 100644 index 0000000000000000000000000000000000000000..c36200cb782e835190ad93d08390ad12996aeb53 --- /dev/null +++ b/src/backend/methods/opcua.rs @@ -0,0 +1,88 @@ +/*! 
all the opcua functions +/// The MI acts as an opcua server and here is the api to interact with an MI +//!*/ +use crate::backend::types::opcua::*; +use crate::backend::utils::opcua::*; +use dpsfw::api_method::{Processing, ProcessingReply, RawRequest, SelfDeclaration}; +use dpsfw::http::StatusCode; +use dpsfw::processing_meta_store::ProcessingMetaStore; +use dpsfw::self_declare; +use dpsfw_types::ctx; +use dpsfw_types::error::PE; +use utils::ciddebug; + +use async_trait::async_trait; + +self_declare!(MeasurementInstrumentGet, comments:r#" +to read measurement values from the mi +"#); +#[async_trait] +impl Processing for MeasurementInstrumentGet { + type Input = MeasurementInstrumentGetInput; + type Output = MeasurementInstrumentGetOutput; + async fn execute_call( + &self, + request: &RawRequest, + input: &Self::Input, + output: &mut Self::Output, + errors: &mut Vec<PE>, + _: &mut ProcessingMetaStore, + ) -> ProcessingReply { + ciddebug!("Starting opcua process with get measurement"); + + *output = match start_process(OPCUACall::Get(input.clone()), request.token.clone(), errors) + .await + { + OPCUAResult::Ok(s) => MeasurementInstrumentGetOutput { result: s }, + OPCUAResult::Error(s) => { + return PE::opcua_connection_error(&format!( + "Failed to get data from opcua client: {}", + s + )) + .id_warn(ctx!()) + .into() + } + }; + + Ok(StatusCode::OK) + } +} + +self_declare!(MeasurementInstrumentMethod, comments:r#" +to call methods on the mi +"#); +#[async_trait] +impl Processing for MeasurementInstrumentMethod { + type Input = MeasurementInstrumentMethodInput; + type Output = MeasurementInstrumentMethodOutput; + async fn execute_call( + &self, + request: &RawRequest, + input: &Self::Input, + output: &mut Self::Output, + errors: &mut Vec<PE>, + _: &mut ProcessingMetaStore, + ) -> ProcessingReply { + ciddebug!("Starting opcua process with method call"); + + *output = match start_process( + OPCUACall::Method(input.clone()), + request.token.clone(), + errors, + ) + .await 
+ { + OPCUAResult::Ok(s) => MeasurementInstrumentMethodOutput { result: s }, + OPCUAResult::Error(s) => { + return PE::opcua_connection_error(&format!( + "Failed to get data from opcua client: {}", + s + )) + .id_warn(ctx!()) + .into() + } + }; + + Ok(StatusCode::OK) + } +} diff --git a/src/backend/methods/series.rs b/src/backend/methods/series.rs new file mode 100644 index 0000000000000000000000000000000000000000..16f6dca4123b06356f47b42dba1f6740cc0db3dc --- /dev/null +++ b/src/backend/methods/series.rs @@ -0,0 +1,280 @@ +/*!************************************************************************************************** +Contains all ARM methods known to the backend. +A method is implemented with the RawRequest, an input struct, an output struct and an error struct, +returns a StatusCode and an Option<PE>, +non fatal errors are pushed to error-vector and +fatal errors are returned +**************************************************************************************************!*/ + +use async_trait::async_trait; +use std::collections::HashMap; +use std::marker::PhantomData as PD; + +use crate::backend::init::SERIES_INDEX_TBL_NAME; +use crate::db::types::io_types::MIDeviceType; +use crate::mc::uids::MCUSID; +use backend_lib::backend_utils::index::get_uids_from_uid_index_table; +use backend_lib::init::BackendConfig; +use backend_lib::smarty_contracts_cookbook::utils::{ + get_available_steps_for_id, get_proc_def, get_state_def, +}; +use dpsfw::api_method::{Processing, ProcessingReply, RawRequest, SelfDeclaration}; +use dpsfw::dp_utils::cache::{connect_local_db, SQLiteTable}; +use dpsfw::dp_utils::clienting::{perform_client_request, ServiceDestination}; +use dpsfw::dp_utils::crypto::get_keyid_from_pem_pubkey; +use dpsfw::http::StatusCode; +use dpsfw::meta_processors::parse_token; +use dpsfw::processing_meta_store::ProcessingMetaStore; +use dpsfw::self_declare; +use dpsfw_types::bli::io_types::AssocProc; +use dpsfw_types::config::GlobalConfigured; +use 
dpsfw_types::ctx; +use dpsfw_types::dp::services::NodeService; +use dpsfw_types::error::PE; +use dpsfw_types::merge::MergeLeaf; +use dpsfw_types::EmptyIOData; +use utils::{asynctor, mentor}; + +self_declare!(GetSeriesAllMethod, comments:r#" +try to get all accessable mcusids +"#); +#[async_trait] +impl Processing for GetSeriesAllMethod { + type Input = EmptyIOData; + type Output = Vec<MCUSID>; + async fn execute_call( + &self, + request: &RawRequest, + _input: &Self::Input, + output: &mut Self::Output, + errors: &mut Vec<PE>, + _: &mut ProcessingMetaStore, + ) -> ProcessingReply { + let _ = asynctor!(cid, perform_client_request; request, &ServiceDestination::Internal(NodeService::DB, None), output, errors).map_err(|e| errors.push(e.warn(ctx!()))); + + let mut other_uids = mentor!(cid, get_uids_from_uid_index_table;&SERIES_INDEX_TBL_NAME)?; + + output.append(&mut other_uids); + output.sort_unstable(); + output.dedup(); + + Ok(StatusCode::OK) + } +} + +self_declare!(GetMidTypesMethod, comments:r#" +try to get all accessable mcusids +"#); +#[async_trait] +impl Processing for GetMidTypesMethod { + type Input = EmptyIOData; + type Output = Vec<MIDeviceType>; + async fn execute_call( + &self, + request: &RawRequest, + _input: &Self::Input, + output: &mut Self::Output, + errors: &mut Vec<PE>, + _: &mut ProcessingMetaStore, + ) -> ProcessingReply { + let result = asynctor!(cid, perform_client_request; request, &ServiceDestination::Internal(NodeService::DB, None), output, errors); + result.map_err(|e| e.id_debug(ctx!())) + } +} + +use crate::backend::types::io_types::{GetSeriesDataOutput, SeriesDigRep}; +use crate::db::types::io_types::GetSeriesDataOutput as DBGetSeriesDataOutput; +use crate::db::types::io_types::SeriesDataIO; +use dpsfw_types::bli::io_types::OurProc; +self_declare!(GetSeriesDataMethod, comments:r#" +try to get all accessable data for given mcusids +"#); +#[async_trait] +impl Processing for GetSeriesDataMethod { + type Input = Vec<MCUSID>; + type Output = 
GetSeriesDataOutput; + async fn execute_call( + &self, + request: &RawRequest, + input: &Self::Input, + output: &mut Self::Output, + errors: &mut Vec<PE>, + pms: &mut ProcessingMetaStore, + ) -> ProcessingReply { + let result = GetSeriesDataVPNMethod {} + .execute_call(request, input, output, errors, pms) + .await?; + + output.sort_unstable_by_key(|mi| mi.data.mcusid.clone()); + // read our_notifications table + let conn = connect_local_db(&BackendConfig::get_global().backend_cache).unwrap(); + let procs_tbl = SQLiteTable::new_from_struct("our_procs", "proc_id", PD::<OurProc>); + let procs = procs_tbl.select(&conn, None)?; + + let token = parse_token(&request.token)?; + let issuer_id = get_keyid_from_pem_pubkey(&token.0.sub)?; + + let mut assoc_procs: HashMap<MCUSID, Vec<MergeLeaf<AssocProc>>> = HashMap::new(); + for note in procs { + // get proc_type_info + let proc = match get_proc_def(¬e.proc_type_id) { + Ok(proc) => proc, + Err(e) => { + errors.push(e); + continue; + } + }; + // get the current state definition + let state = match get_state_def(¬e.proc_type_id, ¬e.state) { + Ok(state) => state, + Err(e) => { + errors.push(e); + continue; + } + }; + let avail_steps = asynctor!(cid, get_available_steps_for_id; ¬e.proc_id, &issuer_id) + .map_err(|e| e.id_warn(ctx!())) + .unwrap_or_default(); + let new_note = AssocProc { + proc_id: note.proc_id, + state: note.state, + last_update: note.last_update, + state_label: state.name.to_string(), + proc_type_id: note.proc_type_id, + proc_type_label: proc.name.to_string(), + avail_steps, + }; + for uid in note.guids.0 { + let id = match MCUSID::try_from(uid) { + Ok(id) => id, + Err(e) => { + e.id_warn(ctx!()); + continue; + } + }; + match assoc_procs.get_mut(&id) { + None => { + assoc_procs + .insert(id, vec![MergeLeaf::with_current_node(new_note.clone())]); + } + Some(procs) => { + procs.push(MergeLeaf::with_current_node(new_note.clone())); + } + } + } + } + + // join process data to DigRep data + for dataset in output { + 
// if let Some(ref mcusid) = dataset.data.mcusid { + if let Some(notes) = assoc_procs.remove(&dataset.data.mcusid) { + dataset.proc_data = notes; + }; + // } + } + Ok(result) + } +} + +self_declare!(GetSeriesDataVPNMethod, comments:r#" +try to get all data from DB for given mcusids +"#); +#[allow(clippy::upper_case_acronyms)] +#[async_trait] +impl Processing for GetSeriesDataVPNMethod { + type Input = Vec<MCUSID>; + type Output = GetSeriesDataOutput; + async fn execute_call( + &self, + request: &RawRequest, + _input: &Self::Input, + output: &mut Self::Output, + errors: &mut Vec<PE>, + _pms: &mut ProcessingMetaStore, + ) -> ProcessingReply { + let mut fw_output = DBGetSeriesDataOutput::default(); + let result = asynctor!(cid, perform_client_request; request, &ServiceDestination::Internal(NodeService::DB, None), &mut fw_output, errors).map_err(|e| e.id_debug(ctx!()))?; + for dataset in fw_output { + // let mcusid = match dataset.data.mcusid.clone() { + // Some(id) => id, + // None => { + // errors.push( + // PE::database_error("Series data contains no MCUSID").id_error(ctx!()), + // ); + // continue; + // } + // }; + let final_dataset = SeriesDigRep { + mcusid: dataset.data.mcusid.clone(), + data: dataset.data, + device_type_data: dataset.device_type_data, + files: dataset.files, + // files_pk: String::from("file_id"), + proc_data: Vec::new(), + // proc_data_pk: String::from("proc_id"), + }; + output.push(final_dataset); + } + Ok(result) + } +} + +self_declare!(UpdateSeriesDataMethod, comments:r#" +update series data for given mcusid, fields that are null or omitted won't be updated +"#); +#[async_trait] +impl Processing for UpdateSeriesDataMethod { + type Input = SeriesDataIO; + type Output = EmptyIOData; + async fn execute_call( + &self, + request: &RawRequest, + _input: &Self::Input, + output: &mut Self::Output, + errors: &mut Vec<PE>, + _: &mut ProcessingMetaStore, + ) -> ProcessingReply { + let result = asynctor!(cid, perform_client_request; request, 
&ServiceDestination::Internal(NodeService::DB, None), output, errors); + result.map_err(|e| e.id_debug(ctx!())) + } +} + +self_declare!(InsertSeriesDataMethod, comments:r#" +insert new series data, fields that are omitted will be set as NULL in postgres +"#); +#[async_trait] +impl Processing for InsertSeriesDataMethod { + type Input = Vec<SeriesDataIO>; + type Output = EmptyIOData; + async fn execute_call( + &self, + request: &RawRequest, + _input: &Self::Input, + output: &mut Self::Output, + errors: &mut Vec<PE>, + _: &mut ProcessingMetaStore, + ) -> ProcessingReply { + let result = asynctor!(cid, perform_client_request; request, &ServiceDestination::Internal(NodeService::DB, None), output, errors); + result.map_err(|e| e.id_debug(ctx!())) + } +} + +self_declare!(DeleteSeriesDataMethod, comments:r#" +delete series data for given mcusid, will error if there are ForeignKeys to this mcusid +"#); +#[async_trait] +impl Processing for DeleteSeriesDataMethod { + type Input = MCUSID; + type Output = EmptyIOData; + async fn execute_call( + &self, + request: &RawRequest, + _input: &Self::Input, + output: &mut Self::Output, + errors: &mut Vec<PE>, + _: &mut ProcessingMetaStore, + ) -> ProcessingReply { + let result = asynctor!(cid, perform_client_request; request, &ServiceDestination::Internal(NodeService::DB, None), output, errors); + result.map_err(|e| e.id_debug(ctx!())) + } +} diff --git a/src/backend/methods/sym2.rs b/src/backend/methods/sym2.rs new file mode 100644 index 0000000000000000000000000000000000000000..3376649d8d7cd27bbabe787f1b22456fc8a49c7b --- /dev/null +++ b/src/backend/methods/sym2.rs @@ -0,0 +1,63 @@ +//! 
all the process related functions +use async_trait::async_trait; + +use crate::backend::init::McBackendConfig; +use dpsfw::api_method::{Processing, ProcessingReply, RawRequest, SelfDeclaration}; +use dpsfw::dp_utils::clienting::service_dest::ThirdPartyTarget; +use dpsfw::dp_utils::clienting::{ + collect_raw_client_request, guided_send_client_request, ServiceDestination, +}; +use dpsfw::http::{header, StatusCode}; +use dpsfw::processing_meta_store::ProcessingMetaStore; +use dpsfw::self_declare; +use dpsfw_types::config::GlobalConfigured; +use dpsfw_types::ctx; +use dpsfw_types::error::{PEable, PE}; +use dpsfw_types::EmptyIOData; +use utils::asynctor; +use utils::filehelper::FileHelper; + +self_declare!(Sym2GwMethod, comments:r#" +send something to sym2gw +"#); +#[async_trait] +impl Processing for Sym2GwMethod { + type Input = EmptyIOData; + type Output = EmptyIOData; + async fn execute_call( + &self, + request: &RawRequest, + _input: &Self::Input, + _output: &mut Self::Output, + _errors: &mut Vec<PE>, + pms: &mut ProcessingMetaStore, + ) -> ProcessingReply { + let (key, cert, peer_cert, host) = match ( + &McBackendConfig::get_global().sym2_key, + &McBackendConfig::get_global().sym2_cert, + &McBackendConfig::get_global().sym2_peer_cert, + McBackendConfig::get_global().sym2_host.clone(), + ) { + (Some(key), Some(cert), Some(peer_cert), Some(host)) => (key, cert, peer_cert, host), + _ => return PE::invalid_method("sym2 not configured").into(), + }; + let key_pem = FileHelper::read_file_to_string(key).map2pe_error(ctx!())?; + let cert_pem = FileHelper::read_file_to_string(cert).map2pe_error(ctx!())?; + let peer_cert_pem = FileHelper::read_file_to_string(peer_cert).map2pe_error(ctx!())?; + let credentials = Some((key_pem, cert_pem)); + let mut target = ThirdPartyTarget::new(host, credentials)?; + target.set_peer_cert(peer_cert_pem)?; + let mut request = request.clone(); + request.url = "software_hash/".to_owned(); + let service = ServiceDestination::ThirdParty(target); + 
let response = guided_send_client_request(&request, &service).await?; + let mut content = Vec::new(); + let (_status, content_type) = + asynctor!(cid, collect_raw_client_request; response, &service, &mut content)?; + pms.binary_body = content; + pms.response_headers + .insert(header::CONTENT_TYPE, content_type); + + Ok(StatusCode::OK) + } +} diff --git a/src/backend/mod.rs b/src/backend/mod.rs new file mode 100644 index 0000000000000000000000000000000000000000..99b369b06a593f4847313b5241eaabb6d520fa50 --- /dev/null +++ b/src/backend/mod.rs @@ -0,0 +1,12 @@ +#[cfg(feature = "call_id_tokio")] +pub mod init; +#[cfg(feature = "call_id_tokio")] +pub mod methods; +#[cfg(feature = "call_id_tokio")] +pub mod routing; +#[cfg(feature = "call_id_tokio")] +pub mod smarties; +#[cfg(feature = "call_id_tokio")] +pub mod utils; + +pub mod types; diff --git a/src/backend/routing.rs b/src/backend/routing.rs new file mode 100644 index 0000000000000000000000000000000000000000..0a240c0fdb4ccd930c4678fe66937174734d0a21 --- /dev/null +++ b/src/backend/routing.rs @@ -0,0 +1,194 @@ +/*!*************************************************************************************************** +* Set Input data structure here => actually routes method to the right function * +* Set filter functions here * +**************************************************************************************************!*/ +#![allow(unreachable_patterns)] + +#[cfg(any( + feature = "dpif_world", + feature = "dpif_vpn", + feature = "dpif_intra", + feature = "dpif_debug" +))] +use super::methods; +#[cfg(any(feature = "dpif_world", feature = "dpif_debug", feature = "dpif_intra"))] +use backend_lib::backend_utils::filters as bmp; +#[cfg(any(feature = "dpif_world", feature = "dpif_intra", feature = "dpif_debug"))] +use dpsfw::http::Method; +#[cfg(any( + feature = "dpif_world", + feature = "dpif_vpn", + feature = "dpif_intra", + feature = "dpif_debug" +))] +use dpsfw::meta_processors as mp; +#[cfg(any(feature = 
"dpif_world", feature = "dpif_debug", feature = "dpif_intra"))] +use dpsfw::processing_meta_store::{ForwardRequest, PlainForward}; +use dpsfw::routing::RoutingTable; +#[cfg(any(feature = "dpif_world", feature = "dpif_debug", feature = "dpif_intra"))] +use dpsfw_types::dp::{services::NodeService, StakeholderType}; + +///************************************************************************* +///* The basic routing Table that should always be merged to Service Tables * +///*************************************************************************/ +pub fn create_routing_table() -> RoutingTable { + #[allow(unused_mut)] + let mut routing_table = RoutingTable::new(); + + #[cfg(any(feature = "dpif_world", feature = "dpif_debug"))] + routing_table.add_post( + "/mc/opcua/mi/get/", + methods::opcua::MeasurementInstrumentGet {}, + &[&mp::SimpleTokenAuth {}], + &[&mp::Ident {}], + ); + #[cfg(any(feature = "dpif_world", feature = "dpif_debug"))] + routing_table.add_post( + "/mc/opcua/mi/method/", + methods::opcua::MeasurementInstrumentMethod {}, + &[&mp::SimpleTokenAuth {}], + &[&mp::Ident {}], + ); + #[cfg(any(feature = "dpif_world", feature = "dpif_vpn", feature = "dpif_debug"))] + routing_table.add_get( + "/mc/mid/get_all/", + methods::mid::GetMidAllMethod {}, + &[&mp::SimpleTokenAuth {}], + &[&mp::Ident {}], + ); + #[cfg(any(feature = "dpif_world", feature = "dpif_vpn", feature = "dpif_debug"))] + routing_table.add_post( + "/mc/mid/get_all/", + methods::mid::GetAllBySeries {}, + &[&mp::SimpleTokenAuth {}], + &[&mp::Ident {}], + ); + #[cfg(any(feature = "dpif_world", feature = "dpif_intra", feature = "dpif_debug"))] + routing_table.add_custom( + Method::POST, + "/mc/mid/get_data/", + methods::mid::GetMidDataMethod {}, + &[ + &mp::SimpleTokenAuth {}, + &bmp::RegisterForwardFromIndex {}, + &mp::DisseminateRequests {}, + ], + &[&mp::EvaluateResponses {}], + |mut call| { + // forwarding + call.pms.assoc_uid_type = Some(&crate::mc::uids::MCUD_ID); + let pf = PlainForward { + 
service: NodeService::Backend, + target_type: StakeholderType::OTHERS, + pk: Some(String::from("mcudid")), + }; + call.pms.distribution_targets = Some(ForwardRequest::Plain(pf)); + call + }, + ); + + #[cfg(any(feature = "dpif_vpn", feature = "dpif_debug"))] + routing_table.add_post( + "/mc/mid/get_data/", + methods::mid::GetMidDataVpnMethod {}, + &[&mp::SimpleTokenAuth {}], + &[&mp::Ident {}], + ); + #[cfg(any(feature = "dpif_world", feature = "dpif_debug"))] + routing_table.add_post( + "/mc/mid/update_data/", + methods::mid::UpdateMidDataMethod {}, + &[&mp::SimpleTokenAuth {}], + &[&mp::Ident {}], + ); + #[cfg(any(feature = "dpif_world", feature = "dpif_debug"))] + routing_table.add_post( + "/mc/mid/insert_data/", + methods::mid::InsertMidDataMethod {}, + &[&mp::SimpleTokenAuth {}], + &[&mp::Ident {}], + ); + #[cfg(any(feature = "dpif_world", feature = "dpif_debug"))] + routing_table.add_post( + "/mc/mid/delete/", + methods::mid::DeleteMidDataMethod {}, + &[&mp::SimpleTokenAuth {}], + &[&mp::Ident {}], + ); + #[cfg(any(feature = "dpif_world", feature = "dpif_debug"))] + routing_table.add_get( + "/mc/series/get_types/", + methods::series::GetMidTypesMethod {}, + &[&mp::SimpleTokenAuth {}], + &[&mp::Ident {}], + ); + #[cfg(any(feature = "dpif_world", feature = "dpif_vpn", feature = "dpif_debug"))] + routing_table.add_get( + "/mc/series/get_all/", + methods::series::GetSeriesAllMethod {}, + &[&mp::SimpleTokenAuth {}], + &[&mp::Ident {}], + ); + #[cfg(any(feature = "dpif_world", feature = "dpif_debug"))] + routing_table.add_custom( + Method::POST, + "/mc/series/get_data/", + methods::series::GetSeriesDataMethod {}, + &[ + &mp::SimpleTokenAuth {}, + &bmp::RegisterForwardFromIndex {}, + &mp::DisseminateRequests {}, + ], + &[&mp::EvaluateResponses {}], + |mut call| { + // forwarding + call.pms.assoc_uid_type = Some(&crate::mc::uids::MCUS_ID); + let pf = PlainForward { + service: NodeService::Backend, + target_type: StakeholderType::OTHERS, + pk: 
Some(String::from("mcusid")), + }; + call.pms.distribution_targets = Some(ForwardRequest::Plain(pf)); + call + }, + ); + #[cfg(any(feature = "dpif_vpn", feature = "dpif_debug"))] + routing_table.add_post( + "/mc/series/get_data/", + methods::series::GetSeriesDataMethod {}, + &[&mp::SimpleTokenAuth {}], + &[&mp::Ident {}], + ); + #[cfg(any(feature = "dpif_world", feature = "dpif_debug"))] + routing_table.add_post( + "/mc/series/update_data/", + methods::series::UpdateSeriesDataMethod {}, + &[&mp::SimpleTokenAuth {}], + &[&mp::Ident {}], + ); + #[cfg(any(feature = "dpif_world", feature = "dpif_debug"))] + routing_table.add_post( + "/mc/series/insert_data/", + methods::series::InsertSeriesDataMethod {}, + &[&mp::SimpleTokenAuth {}], + &[&mp::Ident {}], + ); + #[cfg(any(feature = "dpif_world", feature = "dpif_debug"))] + routing_table.add_post( + "/mc/series/delete/", + methods::series::DeleteSeriesDataMethod {}, + &[&mp::SimpleTokenAuth {}], + &[&mp::Ident {}], + ); + #[cfg(any(feature = "dpif_world", feature = "dpif_debug"))] + routing_table.add_post( + "/mc/bli/sym2gw/", + methods::sym2::Sym2GwMethod {}, + &[&mp::SimpleTokenAuth {}], + &[&mp::Ident {}], + ); + + let mut default_router_table = dpsfw::routing::DEFAULT_ROUTER.clone(); + routing_table.extend(&mut default_router_table); + routing_table +} diff --git a/src/backend/smarties/dea_mi.rs b/src/backend/smarties/dea_mi.rs new file mode 100644 index 0000000000000000000000000000000000000000..56409e5937a482e47dc4affb7066fada55e69a58 --- /dev/null +++ b/src/backend/smarties/dea_mi.rs @@ -0,0 +1,275 @@ +//! 
this uses sw_version and mcuid +#![allow(clippy::upper_case_acronyms)] + +use crate::backend::smarties::DE; +use crate::backend::utils::demol::{apply_at_demol, reg_mi_for_application}; +use crate::mc::uids::MCUDID; +use dpsfw_types::{ + bli::{ + io_types::{ExiMapIO, SmartyProcRecipeID}, + smarties::{PatternsTemplate, ProcRecipe, ProcState, SmartyProcessing, Stepping}, + }, + dl::{record::StepData, request::ContentProcess}, + dp::uids::{guid::GUIDList, ProcID}, + dp::StakeholderType, + error::PE, + EmptyIOData, {build_steps, build_steps_group}, +}; +use utils::ciddebug; + +use async_trait::async_trait; +use lazy_static::lazy_static; +use phf::phf_map; +use phf::Map; +use serde::{Deserialize, Serialize}; +use std::collections::HashMap; +use std::sync::Arc; + +use strum_macros::IntoStaticStr; + +lazy_static! { + pub(crate) static ref PROC: ProcRecipe = ProcRecipe { + name: "Apllication for reverfication", + proc_type_id: SmartyProcRecipeID{ + schema: String::from(crate::db::MC_SCHEMA), + recipe_id: String::from("DeaMi") + }, + desc: "This process performs a apllication for reverfication of a measuring instrument using DEMOL", + states: &PROC_STATES, + exi_map: &EXI_MAP, + init_state: DEAMiState::Init.into(), + valid_countries: vec![&DE], + proc_obj_scheme: crate::mc::uids::MCUD_ID, + exi_map_deriver: derive_map, + }; + static ref PROC_STATES: HashMap<&'static str, ProcState> = { + vec![ + ( + DEAMiState::Init.into(), + ProcState { + steps: build_steps![DEAStep, UserApplyDEA], + name: "Initialized", + desc: "The DEA has been initialized.", + }, + ), + ( + DEAMiState::Applied.into(), + ProcState { + steps: build_steps![DEAStep, MSAcceptApplication], + name: "Reverfication requested", + desc: "The reverfication has been requested by the user.", + }, + ), + ( + DEAMiState::Accepted.into(), + ProcState { + steps: build_steps![DEAStep, MSVerifyMI], + name: "Reverfication request Confirmation", + desc: "The reverfication requested has been accepted by the Market 
Surveillance.", + }, + ), + ( + DEAMiState::Failed.into(), + ProcState { + steps: Vec::new(), + name: "Reverification failed", + desc: "The reverfication has failed, contact your market surveillance authority.", + }, + ), + ( + DEAMiState::Reverified.into(), + ProcState { + steps: Vec::new(), + name: "Re-verified", + desc: "The measuiring instrument has been successfully re-verfied.", + }, + ), + ] + .into_iter() + .collect() + }; +} + +fn derive_map(mut input: ExiMapIO) -> ExiMapIO { + // add ms_node definition + let mut patterns = input.get_mut("user").unwrap().clone(); + patterns.role = [String::from("Service")].to_vec(); + patterns.cn = Vec::new(); + input.insert(String::from("user-node"), patterns); + // add ms_node definition + let mut patterns = input.get_mut("ms").unwrap().clone(); + patterns.role = [String::from("Service")].to_vec(); + patterns.cn = Vec::new(); + input.insert(String::from("ms-node"), patterns); + input +} + +static EXI_MAP: Map<&'static str, PatternsTemplate> = phf_map! 
{ + "user" => PatternsTemplate { + country: Vec::new(), + cn: &[], + sht: &[StakeholderType::USER], + org: &[], + role: &[], + }, + "ms" => PatternsTemplate { + country: Vec::new(), + cn: &[], + sht: &[StakeholderType::MS], + org: &[], + role: &[], + }, +}; + +#[derive(Clone, Debug, Deserialize, Serialize, Hash, IntoStaticStr)] +pub enum DEAMiState { + Init, + Applied, + Accepted, + Failed, + Reverified, +} + +build_steps_group![DEAStep, UserApplyDEA, MSAcceptApplication, MSVerifyMI]; + +/// store the manual update result provided by the user +pub struct UserApplyDEA {} +#[cfg(feature = "call_id_tokio")] +#[async_trait] +impl SmartyProcessing for UserApplyDEA { + type Payload = EmptyIOData; + type States = DEAMiState; + const TARGET_STATES: &'static [Self::States] = &[Self::States::Applied]; + const EXECUTOR_REF: &'static str = "user-node"; + const SUCCESS_MESG: &'static str = "Application for reverfication sent."; + const STEP_MESG: &'static str = "Apply for reverfication"; + const AUTOMATEABLE: bool = true; + async fn exec2( + &self, + guids: &GUIDList, + contract_id: &ProcID, + _payload: Self::Payload, + _errors: &mut Vec<PE>, + ) -> Result<Option<ContentProcess>, PE> { + ciddebug!("user_apply_dea called but nothing to do"); + let outcome = ContentProcess { + contract_id: contract_id.clone(), + contract_state: { + let s: &str = Self::States::Applied.into(); + s.to_string() + }, + contract_parameters: StepData::Step(String::from("{}")), // do we need something here? 
+ contract_type: PROC.proc_type_id.clone(), + guids: guids.clone(), + }; + Ok(Some(outcome)) + } +} +/// store the manual update result provided by the user +pub struct MSAcceptApplication {} +#[cfg(feature = "call_id_tokio")] +#[async_trait] +impl SmartyProcessing for MSAcceptApplication { + type Payload = EmptyIOData; + type States = DEAMiState; + const TARGET_STATES: &'static [Self::States] = &[Self::States::Accepted, Self::States::Failed]; + const EXECUTOR_REF: &'static str = "ms-node"; + const SUCCESS_MESG: &'static str = "Successfully applied at DEMOL for reverfication."; + const STEP_MESG: &'static str = "Accept Application for reverfication"; + const AUTOMATEABLE: bool = true; + async fn exec2( + &self, + guids: &GUIDList, + contract_id: &ProcID, + _payload: Self::Payload, + errors: &mut Vec<PE>, + ) -> Result<Option<ContentProcess>, PE> { + let mut geraete = Vec::new(); + let mcudids: Vec<MCUDID> = guids + .0 + .iter() + .filter_map(|uid| MCUDID::try_from(uid).ok()) + .collect(); + for uid in &guids.0 { + let id = if uid.uid_type == PROC.proc_obj_scheme { + MCUDID::try_from(uid)? + } else { + errors.push(PE::malformed_input( + "requested process requires MCUDID-Type ", + )); + continue; + }; + + // register for DEMOL + reg_mi_for_application(&id, &mut geraete, errors).await?; + } + // try to apply at demol + match apply_at_demol(&mcudids, geraete, errors) + .await + .map_err(|e| { + PE::smart_contract_error( + "Application at Demol failed. 
You should manually apply again", + vec![e], + ) + }) { + Err(demol_error) => { + let step_mesg = serde_json::to_string(&demol_error).unwrap_or_default(); + errors.push(demol_error); + let outcome = ContentProcess { + contract_id: contract_id.clone(), + contract_state: { + let s: &str = Self::States::Failed.into(); + s.to_string() + }, + contract_parameters: StepData::Step(step_mesg), + contract_type: PROC.proc_type_id.clone(), + guids: guids.clone(), + }; + Ok(Some(outcome)) + } + Ok((_status, resp)) => { + let outcome = ContentProcess { + contract_id: contract_id.clone(), + contract_state: { + let s: &str = Self::States::Accepted.into(); + s.to_string() + }, + contract_parameters: StepData::Step(resp.detail).into(), // do we need something here? + contract_type: PROC.proc_type_id.clone(), + guids: guids.clone(), + }; + Ok(Some(outcome)) + } + } + } +} + +/// store the reverfication in DL +pub struct MSVerifyMI {} +#[cfg(feature = "call_id_tokio")] +#[async_trait] +impl SmartyProcessing for MSVerifyMI { + type Payload = bool; // true means success, false means failed + type States = DEAMiState; + const TARGET_STATES: &'static [Self::States] = + &[Self::States::Reverified, Self::States::Failed]; + const EXECUTOR_REF: &'static str = "ms"; + const SUCCESS_MESG: &'static str = "Device successfully re-verified."; + const STEP_MESG: &'static str = "Confirm reverification"; + async fn exec2( + &self, + guids: &GUIDList, + _contract_id: &ProcID, + payload: Self::Payload, + errors: &mut Vec<PE>, + ) -> Result<Option<ContentProcess>, PE> { + if payload { + for uid in &guids.0 { + if let Err(e) = super::sw_update_mi::refverify(uid).await { + errors.push(e); + } + } + } + Ok(None) + } +} diff --git a/src/backend/smarties/mod.rs b/src/backend/smarties/mod.rs new file mode 100644 index 0000000000000000000000000000000000000000..8a0aa48ec786c88e316634abfc23214bbcee4de5 --- /dev/null +++ b/src/backend/smarties/mod.rs @@ -0,0 +1,24 @@ +use lazy_static::lazy_static; + +use 
backend_lib::smarty_contracts_cookbook::ScCookbook; +use dpsfw_types::dp::country::{celes, Country}; + +mod dea_mi; +mod sw_update; +mod sw_update_mi; + +// index of all registered processes +lazy_static! { + /// this is the holy smart contracts cookbook, be cautious while reading + // static SC_COOKBOOK: &'static [&'static ProcRecipe] = &[&sw_update::PROC /* 000 */]; + pub(crate) static ref SC_COOKBOOK: ScCookbook = { + // proc_type_id => ProcDefinition + vec![ + (sw_update::PROC.proc_type_id.clone(), sw_update::PROC.deref()), + (sw_update_mi::PROC.proc_type_id.clone(), sw_update_mi::PROC.deref()), + (dea_mi::PROC.proc_type_id.clone(), dea_mi::PROC.deref()) + ].into_iter().collect() + }; + + pub(crate) static ref DE: Country = celes::Country::germany().into(); +} diff --git a/src/backend/smarties/sw_update.rs b/src/backend/smarties/sw_update.rs new file mode 100644 index 0000000000000000000000000000000000000000..bd1901cdfdca87aa836c0f3c8961bc0c3fb1dcf1 --- /dev/null +++ b/src/backend/smarties/sw_update.rs @@ -0,0 +1,367 @@ +//! 
this uses sw_version and mcuid +#![allow(clippy::upper_case_acronyms)] + +use crate::{ + backend::smarties::DE, + mc::uids::{MCUDID, MCUSID}, +}; +use backend_lib::{init::BackendConfig, methods::bli::StartProcessMethod}; +use dpsfw::{ + api_method::{Processing, RawRequest}, + dp_utils::clienting::{perform_client_request, ServiceDestination}, + http::Method, + processing_meta_store::ProcessingMetaStore, +}; +use dpsfw_types::{ + bli::{ + io_types::{Patterns, SmartyProcRecipeID}, + smarties::{ + patterns_from_template, PatternsTemplate, ProcRecipe, ProcState, SmartyProcessing, + Stepping, + }, + }, + build_steps, build_steps_group, + config::{Config, GlobalConfigured}, + ctx, + dl::request::ContentProcess, + dp::{ + services::NodeService, + uids::{guid::GUIDList, GeneralUID, ProcID}, + StakeholderType, + }, + error::{PEable, PE}, + EmptyIOData, +}; +use utils::{asynctor, ciddebug}; + +use async_trait::async_trait; +use lazy_static::lazy_static; +use phf::{phf_map, Map}; +use serde::{Deserialize, Serialize}; +use std::collections::HashMap; +use std::sync::Arc; +use strum_macros::IntoStaticStr; + +lazy_static! 
{ + pub(crate) static ref PROC: ProcRecipe = ProcRecipe { + name: "Software Update Process", + proc_type_id: SmartyProcRecipeID{ + schema: String::from(crate::db::MC_SCHEMA), + recipe_id: String::from("SwUpdate") + }, + desc: "This process performs a legally regulated conformity assessment of a software update", + states: &PROC_STATES, + exi_map: &EXI_MAP, + init_state: SwUpdateState::Init.into(), + valid_countries: vec![&DE], + proc_obj_scheme: crate::mc::uids::MCUS_ID, + exi_map_deriver: |x| x, // do nothing + }; + static ref PROC_STATES: HashMap<&'static str, ProcState> = { + vec![ + (SwUpdateState::Init.into(), ProcState { + // steps: vec![ManRequestUpdate{}], + steps: build_steps![SwUpdateStep, ManRequestUpdate], + name: "Initialized", + desc: "The software update has been initialized.", + }), + (SwUpdateState::Uploaded.into(), ProcState { + steps: build_steps![SwUpdateStep, NMIAcceptUpdate], + name: "Conformity Assesment", + desc: "The software has been uploaded for conformity assesment.", + }), + (SwUpdateState::Assesed.into(), ProcState { + steps: build_steps![SwUpdateStep, MSEnsembleTest], + name: "SW conformity assesed", + desc: "The software update has been conformity assesed and VA need to perform an ensemble test.", + }), + (SwUpdateState::Finished.into(), ProcState { + steps: Vec::new(), + name: "Finished", + desc: "The MI users have been informed.", + }), + ].into_iter().collect() + }; +} + +static EXI_MAP: Map<&'static str, PatternsTemplate> = phf_map! 
{ + "man" => PatternsTemplate { + country: Vec::new(), + cn: &[], + sht: &[StakeholderType::MANUFACTURER], + org: &[], + role: &[], + }, + "nmi" => PatternsTemplate { + country: Vec::new(), + cn: &[], + sht: &[StakeholderType::NMI], + org: &[], + role: &[], + }, + "ms" => PatternsTemplate { + country: Vec::new(), + cn: &[], + sht: &[StakeholderType::MS], + org: &[], + role: &[], + }, +}; + +#[derive(Clone, Debug, Deserialize, Serialize, Hash, IntoStaticStr)] +pub enum SwUpdateState { + Init, + Uploaded, + Assesed, + Finished, +} + +build_steps_group![ + SwUpdateStep, + ManRequestUpdate, + NMIAcceptUpdate, + MSEnsembleTest +]; + +// payload objects +#[derive(Deserialize, Serialize, Default)] +pub struct NMIAcceptUpdatePayload { + pub(crate) latest_verified_software_version: String, +} + +/// manufacturer supplying update +pub struct ManRequestUpdate {} +#[async_trait] +impl SmartyProcessing for ManRequestUpdate { + type Payload = EmptyIOData; + type States = SwUpdateState; + const TARGET_STATES: &'static [Self::States] = &[Self::States::Uploaded]; + const EXECUTOR_REF: &'static str = "man"; + const SUCCESS_MESG: &'static str = "Update successfully uploaded."; + const STEP_MESG: &'static str = "Apply for Software Update"; + async fn exec2( + &self, + _guids: &GUIDList, + _contract_id: &ProcID, + _payload: Self::Payload, + _errors: &mut Vec<PE>, + ) -> Result<Option<ContentProcess>, PE> { + ciddebug!("man_request_update called but nothing to do"); + Ok(None) + } +} + +use crate::db::types::io_types::SeriesDataIO; +/// nmi aproving update conformity +pub struct NMIAcceptUpdate {} +#[async_trait] +impl SmartyProcessing for NMIAcceptUpdate { + type Payload = NMIAcceptUpdatePayload; + type States = SwUpdateState; + const TARGET_STATES: &'static [Self::States] = &[Self::States::Assesed]; + const EXECUTOR_REF: &'static str = "nmi"; + const SUCCESS_MESG: &'static str = "Update successfully conformity asessed."; + const STEP_MESG: &'static str = "Confirm conformity"; + async 
fn exec2( + &self, + guids: &GUIDList, + _contract_id: &ProcID, + payload: Self::Payload, + errors: &mut Vec<PE>, + ) -> Result<Option<ContentProcess>, PE> { + ciddebug!("ms_ensemble_test called => ininitig mi_software_update processes"); + let guid = if guids.0.len() == 1 { + guids.0.first().unwrap() + } else { + return Err(PE::malformed_input( + "requested process requires exactly one MCUSID-Type ", + )); + }; + let id = if guid.uid_type == PROC.proc_obj_scheme { + MCUSID::try_from(guid)? + } else { + return Err(PE::malformed_input( + "requested process requires MCUSID-Type ", + )); + }; + + // update latest_verified_software_version in series_table + let update_obj = SeriesDataIO { + mcusid: id.clone(), + latest_verified_software_version: payload.latest_verified_software_version.into(), + ..Default::default() + }; + let update_request = + RawRequest::new_from_struct(update_obj, Method::POST, "/mc/series/update_data/"); + // update_request.token = token.to_owned(); // TODO: get another token? OR will the MS act on this input? 
+ ciddebug!("update software version in series_table"); + // let mut errors = Vec::new(); + let _ = asynctor!(cid, perform_client_request; &update_request, &ServiceDestination::Internal(NodeService::DB, None), &mut EmptyIOData{}, errors).map_err(|e| e.id_debug(ctx!()))?; + Ok(None) + } +} + +use crate::backend_io::{ProcessIdIO, StartProcessInput}; +use crate::db::types::io_types::GetMiDataOutput; +/// ms confirms ensemble_test +pub struct MSEnsembleTest {} +#[async_trait] +impl SmartyProcessing for MSEnsembleTest { + type Payload = EmptyIOData; + type States = SwUpdateState; + const TARGET_STATES: &'static [Self::States] = &[Self::States::Finished]; + const EXECUTOR_REF: &'static str = "ms"; + const SUCCESS_MESG: &'static str = "Ensemble test has passed successfully."; + const STEP_MESG: &'static str = "Confirm ensemble test passed"; + async fn exec2( + &self, + guids: &GUIDList, + _contract_id: &ProcID, + _payload: Self::Payload, + errors: &mut Vec<PE>, + ) -> Result<Option<ContentProcess>, PE> { + ciddebug!("ms_ensemble_test called => ininitig mi_software_update processes"); + let guid = if guids.0.len() == 1 { + guids.0.first().unwrap() + } else { + return Err(PE::malformed_input( + "requested process requires exactly one MCUSID-Type ", + )); + }; + + // get all known MI of serie + let mcusid = if guid.uid_type == PROC.proc_obj_scheme { + MCUSID::try_from(guid)? + } else { + return Err(PE::malformed_input( + "requested process requires MCUSID-Type ", + )); + }; + + // get all MIs + let get_mcudids_request = + RawRequest::new_from_struct(mcusid.clone(), Method::POST, "/mc/mid/get_all/"); + let mut mcudids: Vec<MCUDID> = Vec::new(); + let _ = asynctor!(cid, perform_client_request; &get_mcudids_request, &ServiceDestination::Internal(NodeService::DB, None), &mut mcudids, errors).map_err(|e| e.id_debug(ctx!()))?; // maybe ask network? 
+ + // get all mi data + let get_data_request = + RawRequest::new_from_struct(mcudids, Method::POST, "/mc/mid/get_data/"); + let mut mi_list = GetMiDataOutput::default(); + let _ = asynctor!(cid, perform_client_request; &get_data_request, &ServiceDestination::Internal(NodeService::DB, None), &mut mi_list, errors).map_err(|e| e.id_debug(ctx!()))?; + + // iterate over all MI + for mi in mi_list { + // let mcudid = match mi.data.mcudid { + // None => continue, + // Some(id) => id, + // }; + ciddebug!( + "ininitig mi_software_update processes for {}", + mi.data.mcudid + ); + let owner = match mi.data.owner.get_first_val() { + None => { + ciddebug!("No owner for {}", mi.data.mcudid); + continue; + } + Some(_owner) => match mi.data.owner.get_consensus_val() { + None => { + ciddebug!("No get_consensus about owner for {}", mi.data.mcudid); + continue; + } + Some(owner) => owner, + }, + }; + // get init paramas + let proc_type_id = SmartyProcRecipeID { + schema: String::from("mc"), + recipe_id: String::from("SwUpdateMi"), + }; + let proc = BackendConfig::get_global() + .sc_cookbook + .get(&proc_type_id) + .ok_or_else(|| { + PE::smart_contract_error( + &format!("no such process installed: {}", proc_type_id), + Vec::new(), + ) + .id_error(ctx!()) + }); + let proc = match proc { + Ok(proc) => proc, + Err(_) => continue, + }; + + let mut exi_map = proc + .exi_map + .entries() + .map(|(key, pattern)| (key.to_string(), patterns_from_template(pattern))) + .collect::<HashMap<String, Patterns>>(); + + match exi_map + .insert( + "user".to_owned(), + Patterns { + country: Vec::new(), + cn: Vec::new(), + sht: vec![StakeholderType::USER], + org: vec![owner.to_owned()], + role: Vec::new(), + }, + ) + .map2pe_id_error(ctx!()) + { + Ok(_) => (), + Err(_) => continue, + }; + match exi_map + .insert( + "ms".to_owned(), + Patterns { + country: Vec::new(), + cn: Vec::new(), + sht: vec![StakeholderType::MS], + org: vec![Config::get_global().node.node_org.clone()], + role: Vec::new(), + }, + ) + 
.map2pe_id_error(ctx!()) + { + Ok(_) => (), + Err(_) => continue, + }; + + // start proc + let new_proc = StartProcessInput { + proc_type_id, + exi_map, + // TODO: group MI updates by user/owner + guids: GUIDList(vec![GeneralUID::from(mi.data.mcudid.clone())]), + }; + let start_request = + RawRequest::new_from_struct(mcusid.clone(), Method::POST, "/bli/start_process/"); + let call = StartProcessMethod {}; + let mut proc_output = ProcessIdIO::default(); + let mut pms = ProcessingMetaStore::new(); + match call + .execute_call( + &start_request, + &new_proc, + &mut proc_output, + errors, + &mut pms, + ) + .await + { + Ok(proc) => proc, + Err(e) => { + e.id_warn(ctx!()); + continue; + } + }; + } + + Ok(None) + } +} diff --git a/src/backend/smarties/sw_update_mi.rs b/src/backend/smarties/sw_update_mi.rs new file mode 100644 index 0000000000000000000000000000000000000000..28609b41060c5bc1f0c277eea64b751581812df2 --- /dev/null +++ b/src/backend/smarties/sw_update_mi.rs @@ -0,0 +1,660 @@ +//! this uses sw_version and mcuid +#![allow(clippy::upper_case_acronyms)] + +use crate::{ + backend::{ + smarties::DE, + types::mi_comm::StartUpdateInput, + utils::{ + demol::{apply_at_demol, reg_mi_for_application, structs::Geraet}, + rest_http::start_update, + }, + }, + db::types::io_types::MiDataIO, + mc::uids::MCUDID, +}; +use dpsfw::{ + api_method::{Processing, RawRequest}, + dp_utils::clienting::{perform_client_request, ServiceDestination}, + http::{Method, StatusCode}, + processing_meta_store::ProcessingMetaStore, +}; +use dpsfw_types::{ + bli::{ + io_types::{ExiMapIO, SmartyProcRecipeID}, + smarties::{PatternsTemplate, ProcRecipe, ProcState, SmartyProcessing, Stepping}, + }, + build_steps, build_steps_group, ctx, + dl::{record::StepData, request::ContentProcess}, + dp::{ + comm_link::{CommLink, LinkContentEncoding}, + services::NodeService, + uids::{guid::GUIDList, GeneralUID, ProcID}, + StakeholderType, + }, + error::{PEable, PE}, + DateTime, EmptyIOData, +}; +use 
utils::{asynctor, ciddebug}; + +use async_trait::async_trait; +use lazy_static::lazy_static; +use phf::phf_map; +use phf::Map; +use serde::{Deserialize, Serialize}; +use std::collections::HashMap; +use std::sync::Arc; +use strum_macros::IntoStaticStr; +use time::OffsetDateTime; + +lazy_static! { + pub(crate) static ref PROC: ProcRecipe = ProcRecipe { + name: "Software Update Process", + proc_type_id: SmartyProcRecipeID { + schema: String::from(crate::db::MC_SCHEMA), + recipe_id: String::from("SwUpdateMi") + }, + desc: "This process performs a legally regulated software update", + states: &PROC_STATES, + exi_map: &EXI_MAP, + init_state: SwUpdateMiState::Init.into(), + valid_countries: vec![&DE], + proc_obj_scheme: crate::mc::uids::MCUD_ID, + exi_map_deriver: derive_map, + }; + static ref PROC_STATES: HashMap<&'static str, ProcState> = { + vec![ + ( + SwUpdateMiState::Init.into(), + ProcState { + steps: build_steps![SwUpdateMiStep, UserAcceptUpdate], + name: "Initialized", + desc: "The software update has been initialized.", + }, + ), + ( + SwUpdateMiState::AccpetedUpdate.into(), + ProcState { + steps: build_steps![SwUpdateMiStep, MSInvalidate], + name: "Update Pending", + desc: "The update has been accepted by the user.", + }, + ), + ( + SwUpdateMiState::Invalidated.into(), + ProcState { + steps: build_steps![SwUpdateMiStep, MIUpdate, UserUpdate], + name: "Update Pending", + desc: "The MI is devalidate for the Update.", + }, + ), + ( + SwUpdateMiState::Updated.into(), + ProcState { + steps: build_steps![SwUpdateMiStep, MSVerifyUpdate], + name: "Update performed", + desc: "The update has been successfully performed.", + }, + ), + ( + SwUpdateMiState::RoledBack.into(), + ProcState { + steps: build_steps![SwUpdateMiStep, MSVerifyRoleback], + name: "Updated canceled", + desc: "The update has errored and the MI is roled back.", + }, + ), + ( + SwUpdateMiState::Bricked.into(), + ProcState { + steps: Vec::new(), + name: "Update failed", + desc: "The update has errored and 
the MI is disabled now.", + }, + ), + ( + SwUpdateMiState::Reverified.into(), + ProcState { + steps: Vec::new(), + name: "Re-verified after role back ", + desc: "The update has failed, but the MI has recovered to the previous verfied state.", + }, + ), + ] + .into_iter() + .collect() + }; +} + +fn derive_map(mut input: ExiMapIO) -> ExiMapIO { + // add user-node-definition + let mut patterns = input.get_mut("user").unwrap().clone(); + patterns.role = [String::from("Service")].to_vec(); + patterns.cn = Vec::new(); + input.insert(String::from("user-node"), patterns); + + // add ms_node definition + let mut patterns = input.get_mut("ms").unwrap().clone(); + patterns.role = [String::from("Service")].to_vec(); + patterns.cn = Vec::new(); + input.insert(String::from("ms-node"), patterns); + input +} + +static EXI_MAP: Map<&'static str, PatternsTemplate> = phf_map! { + "user" => PatternsTemplate { + country: Vec::new(), + cn: &[], + sht: &[StakeholderType::USER], + org: &[], + role: &[], + }, + "ms" => PatternsTemplate { + country: Vec::new(), + cn: &[], + sht: &[StakeholderType::MS], + org: &[], + role: &[], + }, +}; + +#[derive(Clone, Debug, Deserialize, Serialize, Hash, IntoStaticStr)] +pub enum SwUpdateMiState { + Init, + AccpetedUpdate, + Invalidated, + Updated, + RoledBack, + Bricked, + Reverified, +} + +build_steps_group![ + SwUpdateMiStep, + UserAcceptUpdate, + MSInvalidate, + MIUpdate, + UserUpdate, + MSVerifyUpdate, + MSVerifyRoleback +]; + +// payload objects +#[derive(Deserialize, Serialize, Default)] +pub struct UserUpdatePayload { + pub last_update: DateTime, +} + +/// take the user update-acceptance +pub struct UserAcceptUpdate {} +#[async_trait] +impl SmartyProcessing for UserAcceptUpdate { + type Payload = EmptyIOData; + type States = SwUpdateMiState; + const TARGET_STATES: &'static [Self::States] = &[Self::States::AccpetedUpdate]; + const EXECUTOR_REF: &'static str = "user"; + const SUCCESS_MESG: &'static str = "Update accepted by user for installation."; 
    const STEP_MESG: &'static str = "Accept installing the Software Update";
    /// Nothing to execute for the acceptance step — the transition to
    /// `AccpetedUpdate` is handled by the framework via `TARGET_STATES`.
    async fn exec2(
        &self,
        _guids: &GUIDList,
        _contract_id: &ProcID,
        _payload: Self::Payload,
        _errors: &mut Vec<PE>,
    ) -> Result<Option<ContentProcess>, PE> {
        ciddebug!("user_accept_update called but nothing to do");
        Ok(None)
    }
}

/// Step: invalidate the MI's verification once an update was accepted.
/// Runs automatically on the MS node (`AUTOMATEABLE = true`).
pub struct MSInvalidate {}
#[async_trait]
impl SmartyProcessing for MSInvalidate {
    type Payload = EmptyIOData;
    type States = SwUpdateMiState;
    const TARGET_STATES: &'static [Self::States] = &[Self::States::Invalidated];
    const EXECUTOR_REF: &'static str = "ms-node";
    // NOTE(review): "Verfication is devalidated." has typos ("Verification is
    // invalidated."?), but this is a runtime message — not changed in a
    // comment-only pass.
    const SUCCESS_MESG: &'static str = "Verfication is devalidated.";
    const STEP_MESG: &'static str = "NOBODY READS THIS";
    const AUTOMATEABLE: bool = true;
    /// For every GUID: clear `is_verified` in the mi_table, then report the
    /// `Invalidated` contract state back to the framework.
    async fn exec2(
        &self,
        guids: &GUIDList,
        contract_id: &ProcID,
        _payload: Self::Payload,
        _errors: &mut Vec<PE>,
    ) -> Result<Option<ContentProcess>, PE> {
        for uid in &guids.0 {
            // only MCUDID-typed GUIDs are valid for this process; anything
            // else aborts the whole step (note: unlike UserUpdate below,
            // which records the error and continues)
            let id = if uid.uid_type == PROC.proc_obj_scheme {
                MCUDID::try_from(uid)?
            } else {
                return Err(PE::malformed_input(
                    "requested process requires MCUDID-Type ",
                ));
            };

            // update is_verified in mi_table — only mcudid + is_verified are
            // set, the remaining fields stay at Default
            let update_obj = MiDataIO {
                mcudid: id.clone(),
                is_verified: false.into(),
                ..Default::default()
            };
            let update_request =
                RawRequest::new_from_struct(update_obj, Method::POST, "/mc/mid/update_data/");
            // update_request.token = token.to_owned(); // TODO: get another token? OR will the MS act on this input?
+ ciddebug!("update to mi is not verified in mi_table"); + let mut errors = Vec::new(); + let res = asynctor!(cid, perform_client_request; &update_request, &ServiceDestination::Internal(NodeService::DB, None), &mut EmptyIOData{}, &mut errors); + + if let Err(e) = res { + return Err(e.id_debug(ctx!())); + } + } + let contract_state = SwUpdateMiState::Invalidated; + let content = ContentProcess { + //this seems a bit useless + contract_id: contract_id.clone(), + guids: guids.clone(), + contract_state: { + let s: &str = contract_state.into(); + s.to_string() + }, + contract_type: PROC.proc_type_id.clone(), + contract_parameters: StepData::Step( + serde_json::to_string(&<Self::Payload>::default()).map2pe_id_error(ctx!())?, + ), + }; + + Ok(Some(content)) + } +} + +use crate::backend::methods::opcua::MeasurementInstrumentMethod; +use crate::backend::types::opcua::{ + MeasurementInstrumentMethodInput, MeasurementInstrumentMethodOutput, MethodKind, +}; +use crate::db::types::io_types::GetMiDataOutput as DBGetMiDataOutput; +/// update the mi automatically +pub struct MIUpdate {} +#[async_trait] +impl SmartyProcessing for MIUpdate { + type Payload = EmptyIOData; + type States = SwUpdateMiState; + const TARGET_STATES: &'static [Self::States] = &[ + Self::States::Updated, + Self::States::RoledBack, + Self::States::Bricked, + ]; + const EXECUTOR_REF: &'static str = "user-node"; + const SUCCESS_MESG: &'static str = "Device successfully self updated."; + const STEP_MESG: &'static str = "NOBODY READS THIS"; + const AUTOMATEABLE: bool = true; + async fn exec2( + &self, + guids: &GUIDList, + contract_id: &ProcID, + _payload: Self::Payload, + errors: &mut Vec<PE>, + ) -> Result<Option<ContentProcess>, PE> { + let mut demol_application_list = Vec::new(); + let mcudids: Vec<MCUDID> = guids + .0 + .iter() + .filter_map(|uid| MCUDID::try_from(uid).ok()) + .collect(); + for uid in &guids.0 { + if let Err(e) = update_single_mi(uid, &mut demol_application_list, errors).await { + 
errors.push(e.id_warn(ctx!())) + } + } + // try to apply at demol + if let Err(demol_error) = apply_at_demol(&mcudids, demol_application_list, errors) + .await + .map_err(|e| { + PE::smart_contract_error( + "Application at Demol failed. You should manually apply again", + vec![e], + ) + }) + { + errors.push(demol_error) + }; + + // final state + // let contract_state = match result { + // Err(e) => { + // e.id_warn(ctx!()); + // // alternatively "roled_back" + // // might depend on the content of e? + // // "bricked" + // SwUpdateMiState::Bricked + // } + // Ok(_status) => SwUpdateMiState::Updated, + // }; + let contract_state = SwUpdateMiState::Updated; // this is bad, but it's also deprecated + + // generate proc_update_request according to update outcome + let outcome = ContentProcess { + contract_id: contract_id.clone(), + contract_state: { + let s: &str = contract_state.into(); + s.to_string() + }, + contract_parameters: StepData::Step(String::from("{}")), // do we need something here? 
+ contract_type: PROC.proc_type_id.clone(), + guids: guids.clone(), + }; + + Ok(Some(outcome)) + // Ok(None) + } +} + +/// store the manual update result provided by the user +pub struct UserUpdate {} +#[async_trait] +impl SmartyProcessing for UserUpdate { + type Payload = UserUpdatePayload; + type States = SwUpdateMiState; + const TARGET_STATES: &'static [Self::States] = &[ + Self::States::Updated, + Self::States::RoledBack, + Self::States::Bricked, + ]; + const EXECUTOR_REF: &'static str = "user"; + const SUCCESS_MESG: &'static str = "Device updated by the User."; + const STEP_MESG: &'static str = "Confirm update result"; + async fn exec2( + &self, + guids: &GUIDList, + _contract_id: &ProcID, + payload: Self::Payload, + errors: &mut Vec<PE>, + ) -> Result<Option<ContentProcess>, PE> { + let mut demol_application_list = Vec::new(); + let mcudids: Vec<MCUDID> = guids + .0 + .iter() + .filter_map(|uid| MCUDID::try_from(uid).ok()) + .collect(); + for uid in &guids.0 { + let id = if uid.uid_type == PROC.proc_obj_scheme { + MCUDID::try_from(uid)? + } else { + errors.push(PE::malformed_input( + "requested process requires MCUDID-Type ", + )); + continue; + }; + // update last_update in mi_table + let update_obj = MiDataIO { + mcudid: id.clone(), + last_update: payload.last_update.clone().into(), + ..Default::default() + }; + let update_request = + RawRequest::new_from_struct(update_obj, Method::POST, "/mc/mid/update_data/"); + // update_request.token = token.to_owned(); // TODO: get another token? OR will the MS act on this input? 
+ ciddebug!("update last_update timestamp in mi_table"); + if let Err(e) = asynctor!(cid, perform_client_request; &update_request, &ServiceDestination::Internal(NodeService::DB, None), &mut EmptyIOData{}, errors).map_err(|e| e.id_debug(ctx!())) { + errors.push(e); + continue; + }; + + // TODO: if we have to ride this daed horse further we should also switch here to split off this with MS as actor + // try register for DEMOL + if let Err(demol_error) = + reg_mi_for_application(&id, &mut demol_application_list, errors) + .await + .map_err(|e| { + PE::smart_contract_error( + "Application at Demol failed. You should manually apply again", + vec![e], + ) + }) + { + errors.push(demol_error) + }; + } + // try to apply at demol + if let Err(demol_error) = apply_at_demol(&mcudids, demol_application_list, errors) + .await + .map_err(|e| { + PE::smart_contract_error( + "Application at Demol failed. You should manually apply again", + vec![e], + ) + }) + { + errors.push(demol_error) + }; + Ok(None) + } +} + +/// store the reverfication in DL +pub struct MSVerifyUpdate {} +#[async_trait] +impl SmartyProcessing for MSVerifyUpdate { + type Payload = EmptyIOData; + type States = SwUpdateMiState; + const TARGET_STATES: &'static [Self::States] = &[Self::States::Reverified]; + const EXECUTOR_REF: &'static str = "ms"; + const SUCCESS_MESG: &'static str = "Device successfully re-verified."; + const STEP_MESG: &'static str = "Confirm conformity"; + async fn exec2( + &self, + guids: &GUIDList, + _contract_id: &ProcID, + _payload: Self::Payload, + errors: &mut Vec<PE>, + ) -> Result<Option<ContentProcess>, PE> { + for uid in &guids.0 { + if let Err(e) = super::sw_update_mi::refverify(uid).await { + errors.push(e); + } + } + Ok(None) + } +} + +/// store the reverfication in DL +pub struct MSVerifyRoleback {} +#[async_trait] +impl SmartyProcessing for MSVerifyRoleback { + type Payload = EmptyIOData; + type States = SwUpdateMiState; + const TARGET_STATES: &'static [Self::States] = 
        &[Self::States::Reverified];
    const EXECUTOR_REF: &'static str = "ms-node";
    const SUCCESS_MESG: &'static str = "Device re-verified after roleback.";
    const STEP_MESG: &'static str = "Confirm verification";
    /// Re-verify every GUID via `refverify` (best-effort: per-device failures
    /// are collected in `errors`) and report the `Reverified` contract state.
    async fn exec2(
        &self,
        guids: &GUIDList,
        contract_id: &ProcID,
        _payload: Self::Payload,
        errors: &mut Vec<PE>,
    ) -> Result<Option<ContentProcess>, PE> {
        for uid in &guids.0 {
            if let Err(e) = super::sw_update_mi::refverify(uid).await {
                errors.push(e);
            }
        }
        let contract_state = SwUpdateMiState::Reverified;
        let content = ContentProcess {
            //this seems a bit useless
            contract_id: contract_id.clone(),
            guids: guids.clone(),
            contract_state: {
                // IntoStaticStr: the variant name is the persisted state string
                let s: &str = contract_state.into();
                s.to_string()
            },
            contract_type: PROC.proc_type_id.clone(),
            contract_parameters: StepData::Step(
                serde_json::to_string(&<Self::Payload>::default()).map2pe_id_error(ctx!())?,
            ),
        };
        Ok(Some(content))
    }
}

/*********
* Helper *
*********/
/// Mark a single MI as verified again in the mi_table.
///
/// Fails with `malformed_input` if the GUID is not MCUDID-typed, otherwise
/// posts an `is_verified = true` partial update to the internal DB service.
pub(super) async fn refverify(guid: &GeneralUID) -> Result<(), PE> {
    let id = if guid.uid_type == PROC.proc_obj_scheme {
        MCUDID::try_from(guid)?
    } else {
        return Err(PE::malformed_input(
            "requested process requires MCUDID-Type ",
        ));
    };

    // update is_verified in mi_table — only mcudid + is_verified are set,
    // the remaining fields stay at Default
    let update_obj = MiDataIO {
        mcudid: id.clone(),
        is_verified: true.into(),
        ..Default::default()
    };
    let update_request =
        RawRequest::new_from_struct(update_obj, Method::POST, "/mc/mid/update_data/");
    // update_request.token = token.to_owned(); // TODO: get another token? OR will the MS act on this input?
+ ciddebug!("update to mi is verified in mi_table"); + let mut errors = Vec::new(); + let res = asynctor!(cid, perform_client_request; &update_request, &ServiceDestination::Internal(NodeService::DB, None), &mut EmptyIOData{}, &mut errors); + if let Err(e) = res { + Err(e.id_debug(ctx!())) + } else { + Ok(()) + } +} + +async fn update_single_mi( + uid: &GeneralUID, + demol_application_list: &mut Vec<Geraet>, + errors: &mut Vec<PE>, +) -> Result<StatusCode, PE> { + let (id, encoding, link, sw_version) = if uid.uid_type == PROC.proc_obj_scheme { + let id = MCUDID::try_from(uid)?; + //prepare request + let mut fw_output = DBGetMiDataOutput::default(); + let fw_input = vec![id.clone()]; + let fw_request = RawRequest::new_from_struct(fw_input, Method::POST, "/mc/mid/get_data/"); + // fw_request.token = token.to_owned(); // TODO: get another token? OR will the Node act on this? + // ask db for mi_data to get link + let _result = asynctor!(cid, perform_client_request; &fw_request, &ServiceDestination::Internal(NodeService::DB, None), &mut fw_output, errors).map_err(|e| e.id_debug(ctx!()))?; + + if fw_output.len() > 1 { + return Err(PE::database_error( + "multiple results for requested MCUDID-Type from DB", + )); + } else if fw_output.is_empty() { + return Err(PE::not_found( + "no results for requested MCUDID-Type from DB", + )); + }; + let dig_rep = fw_output.pop().unwrap(); + let link = dig_rep.data.mi_link; + let encoding = dig_rep.data.mi_link_enc; + let sw_version = dig_rep + .series_data + .latest_verified_software_version + .get_consensus_val() + .map(|sw_version| sw_version.to_owned()); + // let sw_version = dig_rep // in the old time series_data was optional field of DBDigRep + // .series_data + // .map(|sd| { + // sd.latest_verified_software_version + // .get_consensus() + // .map(sw_version.to_owned()) + // }) + // .flatten(); + + (id, encoding, link, sw_version) + } else { + return Err(PE::malformed_input( + "requested process requires MCUDID-Type ", + )); + }; 
+ + // TODO: maybe also use the other links? + let link = link.into_first_val(); + let encoding = encoding.into_first_val(); + let (encoding, link) = match (encoding, link) { + (None, _) | (_, None) => { + return Err(PE::smart_contract_error( + "No Link/Encoding set, so asuming offline device and canceling proc_step", + Vec::new(), + )); + } + (_, Some(CommLink(conn_str))) if conn_str.as_str() == "" => { + return Err(PE::smart_contract_error( + "Empty Link set, so asuming offline device and canceling proc_step", + Vec::new(), + )); + } + (Some(encoding), Some(link)) => (encoding, link), + }; + + // get software? + utils::todo(); + // TODO: maybe have a field in dig_rep indicating latest software binary or get file ref from proc_history or???? + let software_update = StartUpdateInput::default(); + + let result = match &encoding { + LinkContentEncoding::Opcua => { + let mut output = MeasurementInstrumentMethodOutput::default(); + let update_obj = MeasurementInstrumentMethodInput { + input_argument: sw_version, + mcudid: id.clone(), + method_kind: MethodKind::UpdateVersion, + }; + let update_request = RawRequest::new_from_struct( + update_obj.clone(), + Method::POST, + "/mc/opcua/mi/method/", + ); + ciddebug!("apply software update on mi"); + let method = MeasurementInstrumentMethod {}; + let result = asynctor!(cid, method.execute_call;&update_request, &update_obj, &mut output, errors, &mut ProcessingMetaStore::default()); + ciddebug!("OPCUA-Method says: {:?}", output); + result + } + LinkContentEncoding::Xml | LinkContentEncoding::Json => { + let result = start_update(&encoding, &link, errors, software_update).await; + ciddebug!("HTTP-Update-Method: {:?}", result); + result + } + }; + + // update the mi + + // set last_update + let date = OffsetDateTime::now_local().unwrap_or_else(|_| OffsetDateTime::now_utc()); + // update last_update in mi_table + let update_obj = MiDataIO { + mcudid: id.clone(), + last_update: DateTime(date).into(), + ..Default::default() + }; + 
let update_request = + RawRequest::new_from_struct(update_obj, Method::POST, "/mc/mid/update_data/"); + // update_request.token = token.to_owned(); // TODO: get another token? OR will the Node act on this? + ciddebug!("update last_update in mi_table"); + let _res = asynctor!(cid, perform_client_request; &update_request, &ServiceDestination::Internal(NodeService::DB, None), &mut EmptyIOData{}, errors).map_err(|e| e.id_debug(ctx!()))?; + + // try register for DEMOL + if let Err(demol_error) = reg_mi_for_application(&id, demol_application_list, errors) + .await + .map_err(|e| { + PE::smart_contract_error( + "Application at Demol failed. You should manually apply again", + vec![e], + ) + }) + { + errors.push(demol_error) + }; + result +} diff --git a/src/backend/types/io_types.rs b/src/backend/types/io_types.rs new file mode 100644 index 0000000000000000000000000000000000000000..8465f74bd9e984fb508dac23b94cc6c50fc3a3e3 --- /dev/null +++ b/src/backend/types/io_types.rs @@ -0,0 +1,56 @@ +use serde::{Deserialize, Serialize}; + +use crate::db::types::io_types::{MIDeviceType, MiDataIO, SeriesDataIO}; +use crate::db_io::{Location, Person}; +use crate::mc::uids::{MCUDID, MCUSID}; +#[cfg(feature = "default_verbose")] +use default_verbose::{CommentedVerboseDefault, DefaultVerbose}; +use dpsfw_types::bli::io_types::AssocProc; +use dpsfw_types::db::io_types::FileDataIO; +use dpsfw_types::error::PE; +use dpsfw_types::merge::MergeLeaf; +#[cfg(feature = "default_verbose")] +use dp_proc_macros::DefaultVerbose; +use dp_proc_macros::Merge; + +/* +MI related +*/ +pub type GetMiDataOutput = Vec<DigRep>; + +#[cfg_attr(feature = "default_verbose", derive(DefaultVerbose))] +#[derive(Serialize, Deserialize, Default, Clone, Debug, PartialEq, Merge)] +pub struct DigRep { + #[primary_key] + pub mcudid: MCUDID, + pub data: MiDataIO, + pub persons_data: Vec<MergeLeaf<Person>>, + pub locations_data: Vec<MergeLeaf<Location>>, + pub device_files: Vec<MergeLeaf<FileDataIO<MCUDID>>>, + // pub 
device_files_pk: String,
    pub series_data: SeriesDataIO,
    pub series_files: Vec<MergeLeaf<FileDataIO<MCUSID>>>,
    // pub series_files_pk: String,
    pub device_type_data: MergeLeaf<MIDeviceType>,
    pub proc_data: Vec<MergeLeaf<AssocProc>>,
    // pub proc_data_pk: String,
    pub installed_software_version: MergeLeaf<String>,
}

/*
 series related
*/

/// DB answer for a series-data query: one digital representation per series.
pub type GetSeriesDataOutput = Vec<SeriesDigRep>;

/// Digital representation of a device *series* (as opposed to the
/// per-device `DigRep` above), merged from several DB sources via `Merge`.
#[cfg_attr(feature = "default_verbose", derive(DefaultVerbose))]
#[derive(Serialize, Deserialize, Default, Clone, Debug, PartialEq, Merge)]
pub struct SeriesDigRep {
    #[primary_key]
    pub mcusid: MCUSID,
    pub data: SeriesDataIO,
    pub device_type_data: MergeLeaf<MIDeviceType>,
    pub files: Vec<MergeLeaf<FileDataIO<MCUSID>>>,
    // pub files_pk: String,
    pub proc_data: Vec<MergeLeaf<AssocProc>>,
    // pub proc_data_pk: String,
}
diff --git a/src/backend/types/mi_comm.rs b/src/backend/types/mi_comm.rs
new file mode 100644
index 0000000000000000000000000000000000000000..68c9f5e69c48baca95478e7b37c8e14042869afd
--- /dev/null
+++ b/src/backend/types/mi_comm.rs
@@ -0,0 +1,26 @@
use dpsfw_types::StdInt;
use serde::{Deserialize, Serialize};

// Generic API structs for communicating with measurement instruments (MIs).

/// Reply of an MI to a software-version query.
#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Deserialize, Serialize, Default)]
pub struct SoftwareVersionOutput {
    pub installed_version: String,
}

/// Request body to start a software update on an MI: the binary
/// (base64-encoded, per the field name) plus its hash.
#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Deserialize, Serialize, Default)]
pub struct StartUpdateInput {
    pub hash: String,
    pub bin_b64: String,
}
/// Status reply of an MI to a started update.
#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Deserialize, Serialize, Default)]
pub struct StartUpdateOutput {
    pub status: String,
}

/// Request for a software hash from the MI, seeded with `init_value`.
#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Deserialize, Serialize, Default)]
pub struct SoftwareHashInput {
    pub init_value: StdInt,
}
/// Hash reply of the MI.
#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Deserialize, Serialize, Default)]
pub struct SoftwareHashOutput {
    pub software_hash: String,
}
diff --git
a/src/backend/types/mod.rs b/src/backend/types/mod.rs new file mode 100644 index 0000000000000000000000000000000000000000..52aee2dced2b049572437a8a3f33271fb7fcc5e6 --- /dev/null +++ b/src/backend/types/mod.rs @@ -0,0 +1,3 @@ +pub mod io_types; // differ from service to service +pub mod mi_comm; // generic API-Structs for communication with MIs +pub mod opcua; // interface for the opcua methods diff --git a/src/backend/types/opcua.rs b/src/backend/types/opcua.rs new file mode 100644 index 0000000000000000000000000000000000000000..182fdcc061dd6f2c7af4e4cb1d755ed0df42118b --- /dev/null +++ b/src/backend/types/opcua.rs @@ -0,0 +1,75 @@ +use crate::mc::uids::MCUDID; +#[cfg(feature = "default_verbose")] +use default_verbose::{CommentedVerboseDefault, DefaultVerbose}; +#[cfg(feature = "default_verbose")] +use dp_proc_macros::DefaultVerbose; + +use serde::{Deserialize, Serialize}; + +/* + Interacting with the measurement instrument +*/ + +#[cfg_attr(feature = "default_verbose", derive(DefaultVerbose))] +#[derive(Serialize, Deserialize, Default, Clone, Debug)] +pub struct MeasurementInstrumentGetInput { + pub mcudid: MCUDID, // unique id of the measurement device +} + +#[cfg_attr(feature = "default_verbose", derive(DefaultVerbose))] +#[derive(Serialize, Deserialize, Default, Clone, Debug)] +pub struct MeasurementInstrumentGetOutput { + pub result: String, // current measurement result, as json +} + +#[cfg_attr(feature = "default_verbose", derive(DefaultVerbose))] +#[derive(Serialize, Deserialize, Default, Clone, Debug)] +pub struct MeasurementInstrumentMethodInput { + pub mcudid: MCUDID, // unique id of the measurement device + pub method_kind: MethodKind, + pub input_argument: Option<String>, +} + +#[cfg_attr(feature = "default_verbose", derive(DefaultVerbose))] +#[derive(Serialize, Deserialize, Default, Clone, Debug)] +pub struct MeasurementInstrumentMethodOutput { + pub result: String, +} + +/* Additional Interfaces for opcua (i hate opcua)*/ +#[cfg_attr(feature = 
"default_verbose", derive(DefaultVerbose))] +#[derive(Serialize, Deserialize, Clone, Debug)] +pub enum MethodKind { + UpdateVersion, + GetVersion, + GetLog, +} +impl Default for MethodKind { + fn default() -> Self { + MethodKind::GetLog + } +} + +#[allow(clippy::upper_case_acronyms)] +#[derive(Serialize, Deserialize, Clone, Debug)] +pub enum OPCUACall { + Method(MeasurementInstrumentMethodInput), + Get(MeasurementInstrumentGetInput), +} +impl Default for OPCUACall { + fn default() -> Self { + OPCUACall::Get(MeasurementInstrumentGetInput::default()) + } +} + +#[allow(clippy::upper_case_acronyms)] +#[derive(Serialize, Deserialize, Clone, Debug)] +pub enum OPCUAResult { + Ok(String), + Error(String), +} +impl Default for OPCUAResult { + fn default() -> Self { + OPCUAResult::Error("".to_string()) + } +} diff --git a/src/backend/utils/demol/mod.rs b/src/backend/utils/demol/mod.rs new file mode 100644 index 0000000000000000000000000000000000000000..cb65df54dddf8b31c2146d9fafa0dd11e857a92d --- /dev/null +++ b/src/backend/utils/demol/mod.rs @@ -0,0 +1,457 @@ +use percent_encoding::{utf8_percent_encode, AsciiSet, CONTROLS}; +use time::{format_description::well_known::Rfc2822, macros::format_description, OffsetDateTime}; +use url::Url; + +use crate::backend::init::McBackendConfig; +use crate::backend::types::io_types::GetMiDataOutput; +use crate::db::types::io_types::MiDataIO; +use crate::mc::uids::MCUDID; +use backend_lib::methods::files::AddFileMethod; +use dpsfw::api_method::{Processing, RawRequest}; +use dpsfw::dp_utils::clienting::{ + collect_raw_client_request, perform_client_request, send_client_request, +}; +use dpsfw::dp_utils::clienting::{ + service_dest::ThirdPartyTarget, + // collect_raw_client_request, send_client_request, + ServiceDestination, +}; +use dpsfw::http::{header, Method, StatusCode}; +use dpsfw_types::backend::io_types::AddFileInput; +use dpsfw_types::config::GlobalConfigured; +use dpsfw_types::db::io_types::GetFileInput; +use 
dpsfw_types::dp::{services::NodeService, uids::uri::MimeType}; +use dpsfw_types::error::{PEable, PE}; +use dpsfw_types::{ctx, EmptyIOData}; +use utils::{asynctor, ciddebug}; + +pub mod structs; +use structs::*; + +const QUERY_PARAM: &AsciiSet = &CONTROLS.add(b' ').add(b'"').add(b'<').add(b'>').add(b'`'); + +/// Creates and session and connects to the url via opcua method 'connect_and_activate' +pub async fn apply_at_demol( + mcudids: &[MCUDID], + geraete_liste: Vec<Geraet>, + errors: &mut Vec<PE>, +) -> Result<(StatusCode, DeaReply), PE> { + // let the fun start + ciddebug!("Make DEA for {:?}", mcudids); + + // let geraeteidentifikation = GeraeteIdentifikation { + // messgeraete_art: MessgeraeteArt(MessgeraeteArten::MA12), //tbd + // hersteller: Str50(String::from("MAN1")), //tbd + // typ: Str50(String::from("Superscale 3001")), //tbd + // identnummer: Str50(id.to_string()), //tbd + // ..Default::default() + // }; + + // let geraetebeschreibung = GeraeteBeschreibung::WaagePOS { + // kenngroesse: Kenngroesse(String::from("15 kg")), //tbd + // pruefort_messgeraet: Str20(String::from("Unterm Demonstrator-Tisch")), //tbd + // }; + // let geraetebeschreibung = vec![geraetebeschreibung]; + + // let adresse_ort = Adresse { + // name1: Str35(String::from("PTB")), //tbd + // name2: Some(Str27(String::from("Node Kunde"))), //tbd + // name3: Some(Str40(String::from("Demonstrator-Raum"))), //tbd + // strasse: Str22(String::from("Salzufer")), //tbd + // hausnummer: Some(Str10(String::from("12"))), //tbd + // ort: Str40(String::from("Berlin")), //tbd + // postleitzahl: Str10(String::from("10587")), //tbd + // iso_3166: Str2(String::from("DE")), //tbd + // ..Default::default() + // }; + // let kontakt_ort = Kontakt { + // ansprechpartner: Str100(String::from("Mr. 
Node")), //tbd + // telefon: Some(Str20(String::from("030-1234-4567"))), //tbd + // fax: Some(Str20(String::from("030-1234-4567"))), //tbd + // mobiltelefon: None, //tbd + // e_mail: Some(Str50(String::from("admin@node1.cloud.mc"))), //tbd + // }; + // let adresse_kontakt_ort = Kunde { + // adresse: adresse_ort, + // kontakt: Some(kontakt_ort), + // }; + // let adresse_rech = Adresse { + // name1: Str35(String::from("MC Node Org")), //tbd + // strasse: Str22(String::from("Abbestr.")), //tbd + // hausnummer: Some(Str10(String::from("2-12"))), //tbd + // ort: Str40(String::from("Berlin")), //tbd + // postleitzahl: Str10(String::from("10587")), //tbd + // iso_3166: Str2(String::from("DE")), //tbd + // ..Default::default() + // }; + // let kontakt_rech = Kontakt { + // ansprechpartner: Str100(String::from("Buchhaltung")), //tbd + // telefon: Some(Str20(String::from("030-1234-4567"))), //tbd + // fax: Some(Str20(String::from("030-1234-4567"))), //tbd + // mobiltelefon: None, //tbd + // e_mail: Some(Str50(String::from("info@cloud.mc"))), //tbd + // }; + // let adresse_kontakt_rech = Kunde { + // adresse: adresse_rech, + // kontakt: Some(kontakt_rech), + // }; + // let adresse_zustell = Adresse { + // name1: Str35(String::from("MC Node Org")), //tbd + // strasse: Str22(String::from("Abbestr.")), //tbd + // hausnummer: Some(Str10(String::from("2-12"))), //tbd + // ort: Str40(String::from("Berlin")), //tbd + // postleitzahl: Str10(String::from("10587")), //tbd + // iso_3166: Str2(String::from("DE")), //tbd + // ..Default::default() + // }; + // let kontakt_zustell = Kontakt { + // ansprechpartner: Str100(String::from("Sekretaritat")), //tbd + // telefon: Some(Str20(String::from("030-1234-4567"))), //tbd + // fax: Some(Str20(String::from("030-1234-4567"))), //tbd + // mobiltelefon: None, //tbd + // e_mail: Some(Str50(String::from("info@cloud.mc"))), //tbd + // }; + // let adresse_kontakt_zustell = Kunde { + // adresse: adresse_zustell, + // kontakt: Some(kontakt_zustell), + 
// }; + + // let geraet = Geraet { + // satz_i_d: UInt(1), //tbd + // antragsjahr: Jahr(String::from("2021")), //tbd + // eichschein: Bool(false), //tbd + // bestellnummer: Some(Str50(String::from("20-08-21_111111"))), //tbd + // rechnungszustellung: Rechnungszustellung(Rechnungszustellungen::R2), //tbd + // standort: adresse_kontakt_ort, + // rechnungsempfaenger: None, + // // rechnungsempfaenger: Some(adresse_kontakt_rech), + // zustelladresse: None, + // // zustelladresse: Some(adresse_kontakt_zustell), + // geraeteidentifikation, + // geraetebeschreibung, + // ..Default::default() + // }; + let geraete = Geraete { + inner: geraete_liste, + }; + + // let zeitstempel = 2020-11-03T09:45:20.001 + // let dateiname = Eichantrag_PTBInstitutBerlinMCNode_20201103_094520001.xml + let now = OffsetDateTime::now_utc(); + // let zeitstempel = now.format("%Y-%m-%dT%H:%M:%S.%3f").to_string(); + let format = + format_description!("[year]-[month]-[day]T[hour]:[minute]:[second].[subsecond digits:3]"); + let zeitstempel = now.format(&format).map2pe_id_warn(ctx!())?; + let dateiname = format!( + "Eichantrag_PTBInstitutBerlinMCNode_{}.xml", //tbd + // now.format("%Y%m%d_%H%M%S%3f") + now.format(format_description!( + "[year][month][day]_[hour][minute][second][subsecond digits:3]" + )) + .map2pe_id_warn(ctx!())? 
+ ); + + let dokument = Dokument { + dateiname: Str100(dateiname.clone()), + zeitstempel: Zeitstempel(zeitstempel), + test: Bool(true), + }; + + let antragsteller = Antragsteller { + kuerzel: Str10(String::from("PTB-Test")), + kennung: Str10(String::from("PTB-Test")), + e_mail: None, + telefon: None, + }; + + let dea = DigitalerEichantrag { + dokument, + antragsteller, + geraete, + }; + + let dea = dea.try_compatible("::", errors)?; + + // ciddebug!("RUST DEA: \n {:?}", dea); + let xml_decl = String::from(r#"<?xml version="1.0" encoding="UTF-8" standalone="yes"?>"#); + let content = xml_decl + &quick_xml::se::to_string(&dea).map2pe_id_error(ctx!())?; + ciddebug!("Minted to: \n {}", content); + + // regsiter xml-application as file for devices + for id in mcudids { + ciddebug!("Making sure there is {}", id); + let fw_input = MiDataIO { + mcudid: id.clone(), + ..Default::default() + }; + let fw_request = + RawRequest::new_from_struct(fw_input, Method::POST, "/mc/mid/update_data/"); + let _ = asynctor!(cid, perform_client_request; &fw_request, &ServiceDestination::Internal(NodeService::DB, None), &mut EmptyIOData{}, errors).map_err(|e| e.id_debug(ctx!()))?; + + ciddebug!("Adding file to: {}", id); + let fw_input = AddFileInput { + guid: Some(id.into()), + content: content.clone().into_bytes(), + file_type_id: 10, + mime_type: MimeType::XML, + file_location: dateiname.clone(), + signature: Vec::new(), + }; + let fw_request = RawRequest::new_from_struct(fw_input.clone(), Method::POST, "/files/add/"); + let mut fw_output = GetFileInput::default(); + let _ = AddFileMethod {} + .execute_call( + &fw_request, + &fw_input, + &mut fw_output, + errors, + &mut Default::default(), + ) + .await?; + } + + // login_for_demol_token + let dest = { + let mut t = + ThirdPartyTarget::new(McBackendConfig::get_global().demol_host.clone(), None).unwrap(); + t.use_mozilla_pki = true; + ServiceDestination::ThirdParty(t) + }; + let token = asynctor!(login_for_demol_token; &dest)?; + 
ciddebug!("{}", token); + + // forward to demol + let mut fw_request = RawRequest::new_empty(); + fw_request.method = Method::POST; + fw_request.raw_input = content.clone().into(); + fw_request.date = now.format(&Rfc2822).unwrap_or_default().into_bytes(); + fw_request.mime_type = MimeType::XML; + fw_request.url = format!("/api/v1/Eichantrag"); + fw_request.ext_header.insert( + header::AUTHORIZATION, + header::HeaderValue::from_str(&format!("Bearer {token}")).map2pe_id_warn(ctx!())?, + ); + + ciddebug!("{:?}", fw_request); + + let response = asynctor!(send_client_request; &fw_request, &dest)?; + let mut response_body = Vec::new(); + let (status, content_type) = + asynctor!(cid, collect_raw_client_request; response, &dest, &mut response_body)?; + // result.map_err(|e| e.id_debug()) + + ciddebug!("status: {:?}", status); + ciddebug!("content_type: {:?}", content_type); + let body = String::from_utf8_lossy(&response_body); + ciddebug!("{}", body); + + let resp: DeaReply = serde_json::from_slice(&response_body).map2pe_id_warn(ctx!())?; + match resp.status { + 200 | 201 => {} + _ => { + return Err(PE::external_connection_error( + resp.detail, + resp.invalid_params + .iter() + .map(|param| { + PE::malformed_input(format!("{}: {}", param.record_id, param.reason)) + }) + .collect(), + )); + } + } + // let _reply: DigitalerEichantragAntwort = (!status.is_server_error()) + // .then(|| { + // quick_xml::de::from_str::<DigitalerEichantragAntwort>(&xml_str).map2pe_id_error(ctx!()) + // }) + // .ok_or_else(|| { + // PE::external_connection_error( + // "Got Error from ", + // vec![PE::unsuccessfull_request(xml_str, Vec::new())], + // ) + // })??; + + Ok((status, resp)) +} + +pub async fn reg_mi_for_application( + id: &MCUDID, + devices: &mut Vec<Geraet>, + errors: &mut Vec<PE>, +) -> Result<(), PE> { + // get data from DigRep + let fw_request = RawRequest::new_from_struct(vec![id], Method::POST, "/mc/mid/get_data/"); + let mut fw_output = GetMiDataOutput::default(); + let _ = 
asynctor!(cid, perform_client_request; &fw_request, &ServiceDestination::Internal(NodeService::Backend, Some(dpsfw_types::dp::services::ServiceInterface::World)), &mut fw_output, errors).map_err(|e| e.id_debug(ctx!()))?; + + let dig_rep = fw_output.pop().unwrap(); + + // let the fun start + ciddebug!("Preapare DEA for {}", id); + + let geraeteidentifikation = GeraeteIdentifikation { + // messgeraete_art: MessgeraeteArt(MessgeraeteArten::12), //TODO:tbd + messgeraete_art: MessgeraeteArt(MessgeraeteArten::from( + dig_rep + .device_type_data + .get_first_val() + .map(|dtd| dtd.mid_type_id) + .unwrap_or(99), + )), //TODO:tbd + // hersteller: Str50(String::from("MAN1")), //tbd + hersteller: Str50( + dig_rep + .series_data + .manufacturer + .into_first_val() + .unwrap_or_default(), + ), //tbd + // typ: Str50(String::from("Superscale 3001")), //tbd + typ: Str50( + dig_rep + .series_data + .name + .into_first_val() + .unwrap_or_default(), + ), //tbd + identnummer: Str50(id.to_string()), //tbd + ..Default::default() + }; + + let geraetebeschreibung = GeraetebeschreibungVariants::WaagePOS { + kenngroesse: Kenngroesse(String::from("15 kg")), //tbd + pruefort_messgeraet: Str20(String::from("Unterm Demonstrator-Tisch")), //tbd + }; + let geraetebeschreibung = vec![geraetebeschreibung]; + + let adresse_ort = Adresse { + name1: Str35(String::from("PTB")), //tbd + name2: Some(Str27(String::from("Node Kunde"))), //tbd + name3: Some(Str40(String::from("Demonstrator-Raum"))), //tbd + strasse: Str22(String::from("Abbestraße")), //tbd + hausnummer: Some(Str10(String::from("2-12"))), //tbd + ort: Str40(String::from("Berlin")), //tbd + postleitzahl: Str10(String::from("10587")), //tbd + // strasse: Str22(String::from("Küppersgarten")), + // hausnummer: Some(Str10(String::from("45"))), + // ort: Str40(String::from("Bonn")), + // postleitzahl: Str10(String::from("53229")), + iso_3166: Str2(String::from("DE")), //tbd + ..Default::default() + }; + let kontakt_ort = Kontakt { + 
ansprechpartner: Str100(String::from("Mr. Node")), //tbd + telefon: Some(Str20(String::from("030-1234-4567"))), //tbd + fax: Some(Str20(String::from("030-1234-4567"))), //tbd + mobiltelefon: None, //tbd + e_mail: Some(Str50(String::from("admin@node1.cloud.mc"))), //tbd + }; + let adresse_kontakt_ort = Kunde { + adresse: adresse_ort, + kontakt: Some(kontakt_ort), + }; + // let adresse_rech = Adresse { + // name1: Str35(String::from("MC Node Org")), //tbd + // strasse: Str22(String::from("Abbestr.")), //tbd + // hausnummer: Some(Str10(String::from("2-12"))), //tbd + // ort: Str40(String::from("Berlin")), //tbd + // postleitzahl: Str10(String::from("10587")), //tbd + // iso_3166: Str2(String::from("DE")), //tbd + // ..Default::default() + // }; + // let kontakt_rech = Kontakt { + // ansprechpartner: Str100(String::from("Buchhaltung")), //tbd + // telefon: Some(Str20(String::from("030-1234-4567"))), //tbd + // fax: Some(Str20(String::from("030-1234-4567"))), //tbd + // mobiltelefon: None, //tbd + // e_mail: Some(Str50(String::from("info@cloud.mc"))), //tbd + // }; + // let adresse_kontakt_rech = Kunde { + // adresse: adresse_rech, + // kontakt: Some(kontakt_rech), + // }; + // let adresse_zustell = Adresse { + // name1: Str35(String::from("MC Node Org")), //tbd + // strasse: Str22(String::from("Abbestr.")), //tbd + // hausnummer: Some(Str10(String::from("2-12"))), //tbd + // ort: Str40(String::from("Berlin")), //tbd + // postleitzahl: Str10(String::from("10587")), //tbd + // iso_3166: Str2(String::from("DE")), //tbd + // ..Default::default() + // }; + // let kontakt_zustell = Kontakt { + // ansprechpartner: Str100(String::from("Sekretaritat")), //tbd + // telefon: Some(Str20(String::from("030-1234-4567"))), //tbd + // fax: Some(Str20(String::from("030-1234-4567"))), //tbd + // mobiltelefon: None, //tbd + // e_mail: Some(Str50(String::from("info@cloud.mc"))), //tbd + // }; + // let adresse_kontakt_zustell = Kunde { + // adresse: adresse_zustell, + // kontakt: 
Some(kontakt_zustell), + // }; + + let year = time::OffsetDateTime::now_utc().year(); + + let geraet = Geraet { + satz_i_d: UInt(1), //tbd + antragsjahr: Jahr(format!("{:04}", year)), //tbd + eichschein: Bool(false), //tbd + bestellnummer: Some(Str50(String::from("20-08-21_111111"))), //tbd + rechnungszustellung: Rechnungszustellung(Rechnungszustellungen::R2), //tbd + standort: adresse_kontakt_ort, + rechnungsempfaenger: None, + // rechnungsempfaenger: Some(adresse_kontakt_rech), + zustelladresse: None, + // zustelladresse: Some(adresse_kontakt_zustell), + geraeteidentifikation, + geraetebeschreibung: Geraetebeschreibung { + inner: geraetebeschreibung, + }, + ..Default::default() + }; + + devices.push(geraet); + Ok(()) +} + +pub async fn login_for_demol_token(service: &ServiceDestination) -> Result<String, PE> { + fn encoder2<'a>(s: &'a str) -> std::borrow::Cow<'a, str> { + utf8_percent_encode(s, QUERY_PARAM).into() + } + + // let x = option_env!("ECX_MC_BACKEND_DEMOL_PASS").unwrap_or_default(); + let x = option_env!("MC_BACKEND_DEMOL_PASS").unwrap_or_default(); + // Demol does not work with queries that are application/x-www-form-urlencoded :'( + ciddebug!("Demol-Secret: {}", x); + let mut url = Url::parse("https://example.org/api/v1/token").map2pe_id_error(ctx!())?; + + // url.set_host(Some(&McBackendConfig::get_global().demol_host)) + // .map2pe_id_error(ctx!())?; + + let p_enc = encoder2(x); + let u_enc = encoder2("PTB"); + let encoded = format!("password={p_enc}&username={u_enc}"); + url.set_query(Some(&encoded)); + + let login_request = RawRequest { + date: OffsetDateTime::now_local() + .unwrap_or_else(|_| OffsetDateTime::now_utc()) + .format(&Rfc2822) + .unwrap_or_default() + .into_bytes(), + method: Method::GET, + url: format!( + "{}?{}#{}", + url.path(), + url.query().unwrap_or_default(), + url.fragment().unwrap_or_default() + ), + ..Default::default() + }; + let resp = asynctor!(send_client_request;login_request, &service)?; + let mut buffer = Vec::new(); 
+ asynctor!(collect_raw_client_request;resp, &service, &mut buffer)?; + + String::from_utf8(buffer).map2pe_id_warn(ctx!()) +} diff --git a/src/backend/utils/demol/structs.rs b/src/backend/utils/demol/structs.rs new file mode 100644 index 0000000000000000000000000000000000000000..9e2b6a85f82279f3c66439b8605b8a8e719ff89c --- /dev/null +++ b/src/backend/utils/demol/structs.rs @@ -0,0 +1,647 @@ +#![allow(missing_docs)] +#![allow(clippy::upper_case_acronyms)] + +use dpsfw_types::error::{PEable, PE}; +use dpsfw_types::{ctx, StdInt}; +use dp_proc_macros::DemolCompatible; + +use serde::{Deserialize, Serialize}; +use serde_repr::*; + +// #[cfg(feature = "default_verbose")] +// use default_verbose::{CommentedVerboseDefault, DefaultVerbose}; +// #[cfg(feature = "default_verbose")] +// use dp_proc_macros::DefaultVerbose; + +pub trait DEMOLCompatible { + fn try_compatible(self, field: &str, warnings: &mut Vec<PE>) -> Result<Self, PE> + where + Self: std::marker::Sized; +} +impl<T: DEMOLCompatible> DEMOLCompatible for Option<T> { + fn try_compatible(self, field: &str, warnings: &mut Vec<PE>) -> Result<Self, PE> { + match self { + None => Ok(None), + Some(inner) => Ok(Some(inner.try_compatible(field, warnings)?)), + } + } +} +impl<T: DEMOLCompatible> DEMOLCompatible for Vec<T> { + fn try_compatible(self, field: &str, warnings: &mut Vec<PE>) -> Result<Self, PE> { + let mut compatible_vec = Vec::new(); + for item in self { + compatible_vec.push(item.try_compatible(field, warnings)?) 
+ } + Ok(compatible_vec) + } +} +// base str types +#[derive(Deserialize, Serialize, Debug, Clone, Default, PartialEq, Eq)] +pub struct Str2(pub String); +#[derive(Deserialize, Serialize, Debug, Clone, Default, PartialEq, Eq)] +pub struct Str10(pub String); +#[derive(Deserialize, Serialize, Debug, Clone, Default, PartialEq, Eq)] +pub struct Str20(pub String); +#[derive(Deserialize, Serialize, Debug, Clone, Default, PartialEq, Eq)] +pub struct Str22(pub String); +#[derive(Deserialize, Serialize, Debug, Clone, Default, PartialEq, Eq)] +pub struct Str27(pub String); +#[derive(Deserialize, Serialize, Debug, Clone, Default, PartialEq, Eq)] +pub struct Str35(pub String); +#[derive(Deserialize, Serialize, Debug, Clone, Default, PartialEq, Eq)] +pub struct Str40(pub String); +#[derive(Deserialize, Serialize, Debug, Clone, Default, PartialEq, Eq)] +pub struct Str50(pub String); +#[derive(Deserialize, Serialize, Debug, Clone, Default, PartialEq, Eq)] +pub struct Str100(pub String); +#[derive(Deserialize, Serialize, Debug, Clone, Default, PartialEq, Eq)] +pub struct Str1024(pub String); + +impl DEMOLCompatible for Str2 { + fn try_compatible(mut self, field: &str, warnings: &mut Vec<PE>) -> Result<Self, PE> { + if self.0.len() > 2 { + warnings.push(PE::smart_contract_error( + &format!( + "{} exceededs string length limitation of DEMOL ({})", + field, 2 + ), + Vec::new(), + )); + self.0.truncate(2); + }; + Ok(self) + } +} +impl DEMOLCompatible for Str10 { + fn try_compatible(mut self, field: &str, warnings: &mut Vec<PE>) -> Result<Self, PE> { + if self.0.len() > 10 { + warnings.push(PE::smart_contract_error( + &format!( + "{} exceededs string length limitation of DEMOL ({})", + field, 10 + ), + Vec::new(), + )); + self.0.truncate(10); + }; + Ok(self) + } +} +impl DEMOLCompatible for Str20 { + fn try_compatible(mut self, field: &str, warnings: &mut Vec<PE>) -> Result<Self, PE> { + if self.0.len() > 20 { + warnings.push(PE::smart_contract_error( + &format!( + "{} exceeded string 
length limitation of DEMOL ({})", + field, 20 + ), + Vec::new(), + )); + self.0.truncate(20); + }; + Ok(self) + } +} +impl DEMOLCompatible for Str22 { + fn try_compatible(mut self, field: &str, warnings: &mut Vec<PE>) -> Result<Self, PE> { + if self.0.len() > 22 { + warnings.push(PE::smart_contract_error( + &format!( + "{} exceeded string length limitation of DEMOL ({})", + field, 22 + ), + Vec::new(), + )); + self.0.truncate(22); + }; + Ok(self) + } +} +impl DEMOLCompatible for Str27 { + fn try_compatible(mut self, field: &str, warnings: &mut Vec<PE>) -> Result<Self, PE> { + if self.0.len() > 27 { + warnings.push(PE::smart_contract_error( + &format!( + "{} exceeded string length limitation of DEMOL ({})", + field, 27 + ), + Vec::new(), + )); + self.0.truncate(27); + }; + Ok(self) + } +} +impl DEMOLCompatible for Str35 { + fn try_compatible(mut self, field: &str, warnings: &mut Vec<PE>) -> Result<Self, PE> { + if self.0.len() > 35 { + warnings.push(PE::smart_contract_error( + &format!( + "{} exceeded string length limitation of DEMOL ({})", + field, 35 + ), + Vec::new(), + )); + self.0.truncate(35); + }; + Ok(self) + } +} +impl DEMOLCompatible for Str40 { + fn try_compatible(mut self, field: &str, warnings: &mut Vec<PE>) -> Result<Self, PE> { + if self.0.len() > 40 { + warnings.push(PE::smart_contract_error( + &format!( + "{} exceeded string length limitation of DEMOL ({})", + field, 40 + ), + Vec::new(), + )); + self.0.truncate(40); + }; + Ok(self) + } +} +impl DEMOLCompatible for Str50 { + fn try_compatible(mut self, field: &str, warnings: &mut Vec<PE>) -> Result<Self, PE> { + if self.0.len() > 50 { + warnings.push(PE::smart_contract_error( + &format!( + "{} exceeded string length limitation of DEMOL ({})", + field, 50 + ), + Vec::new(), + )); + self.0.truncate(50); + }; + Ok(self) + } +} +impl DEMOLCompatible for Str100 { + fn try_compatible(mut self, field: &str, warnings: &mut Vec<PE>) -> Result<Self, PE> { + if self.0.len() > 100 { + 
warnings.push(PE::smart_contract_error(
                &format!(
                    "{} exceeded string length limitation of DEMOL ({})",
                    field, 100
                ),
                Vec::new(),
            ));
            // Truncate on a char boundary — String::truncate panics when the
            // cut lands inside a multi-byte UTF-8 sequence (e.g. umlauts).
            let mut cut = 100;
            while !self.0.is_char_boundary(cut) {
                cut -= 1;
            }
            self.0.truncate(cut);
        };
        Ok(self)
    }
}
impl DEMOLCompatible for Str1024 {
    fn try_compatible(mut self, field: &str, warnings: &mut Vec<PE>) -> Result<Self, PE> {
        if self.0.len() > 1024 {
            warnings.push(PE::smart_contract_error(
                &format!(
                    "{} exceeded string length limitation of DEMOL ({})",
                    field, 1024
                ),
                Vec::new(),
            ));
            // char-boundary-safe truncation (see the shorter Str* impls)
            let mut cut = 1024;
            while !self.0.is_char_boundary(cut) {
                cut -= 1;
            }
            self.0.truncate(cut);
        };
        Ok(self)
    }
}

// other base types
#[derive(Deserialize, Serialize, Debug, Clone, Default, PartialEq, Eq)]
pub struct Zeitstempel(pub String);
impl DEMOLCompatible for Zeitstempel {
    fn try_compatible(self, field: &str, _warnings: &mut Vec<PE>) -> Result<Self, PE> {
        // impudently stolen from here https://www.w3.org/TR/xmlschema11-2/#nt-dateTimeRep
        // FIX: '.' is now escaped — xs:dateTime requires a literal dot before
        // fractional seconds; the unescaped '.' previously matched ANY character.
        let p = [
            "^",
            "-?([1-9][0-9]{3,}|0[0-9]{3})",
            "-(0[1-9]|1[0-2])",
            "-(0[1-9]|[12][0-9]|3[01])",
            "T(([01][0-9]|2[0-3]):[0-5][0-9]:[0-5][0-9](\\.[0-9]+)?|(24:00:00(\\.0+)?))",
            "(Z|(\\+|-)((0[0-9]|1[0-3]):[0-5][0-9]|14:00))?",
            "$",
        ]
        .concat();
        let reg_exp = regex::Regex::new(&p).map2pe_id_error(ctx!())?;
        if reg_exp.is_match(&self.0) {
            Ok(self)
        } else {
            Err(PE::smart_contract_error(
                &format!(
                    "{} does not comply with DEMOL pattern for Zeitstempel xs:dateTime",
                    field
                ),
                Vec::new(),
            ))
        }
    }
}

#[derive(Deserialize, Serialize, Debug, Clone, Default, PartialEq, Eq)]
pub struct Zeit(pub String);
impl DEMOLCompatible for Zeit {
    fn try_compatible(self, field: &str, _warnings: &mut Vec<PE>) -> Result<Self, PE> {
        // impudently stolen from here https://www.w3.org/TR/xmlschema11-2/#nt-timeRep
        // FIX: removed the stray leading space (it forced every value to start
        // with ' ') and escaped the '.' before fractional seconds.
        let p = ["^",
            "(([01][0-9]|2[0-3]):[0-5][0-9]:[0-5][0-9](\\.[0-9]+)?|(24:00:00(\\.0+)?))(Z|(\\+|-)((0[0-9]|1[0-3]):[0-5][0-9]|14:00))?",
            "$"].concat();
        let reg_exp = regex::Regex::new(&p).map2pe_id_error(ctx!())?;
        if reg_exp.is_match(&self.0) {
            Ok(self)
        } else {
+ Err(PE::smart_contract_error( + &format!( + "{} does not comply with DEMOL pattern for Zeitstempel xs:dateTime", + field + ), + Vec::new(), + )) + } + } +} +#[derive(Deserialize, Serialize, Debug, Clone, Default, PartialEq, Eq)] +pub struct Bool(pub bool); +impl DEMOLCompatible for Bool { + fn try_compatible(self, _field: &str, _warnings: &mut Vec<PE>) -> Result<Self, PE> { + Ok(self) + } +} + +#[derive(Deserialize, Serialize, Debug, Clone, Default, PartialEq, Eq)] +pub struct UInt(pub u32); +impl DEMOLCompatible for UInt { + fn try_compatible(self, _field: &str, _warnings: &mut Vec<PE>) -> Result<Self, PE> { + Ok(self) + } +} + +#[derive(Deserialize, Serialize, Debug, Clone, Default, PartialEq)] +pub struct Float(pub f32); +impl DEMOLCompatible for Float { + fn try_compatible(self, _field: &str, _warnings: &mut Vec<PE>) -> Result<Self, PE> { + Ok(self) + } +} + +#[derive(Deserialize, Serialize, Debug, Clone, Default, PartialEq, Eq)] +pub struct Jahr(pub String); +impl DEMOLCompatible for Jahr { + fn try_compatible(self, field: &str, _warnings: &mut Vec<PE>) -> Result<Self, PE> { + // impudently stolen from here https://www.w3.org/TR/xmlschema11-2/#nt-gYearRep + let reg_exp = regex::Regex::new( + "^-?([1-9][0-9]{3,}|0[0-9]{3})(Z|(\\+|-)((0[0-9]|1[0-3]):[0-5][0-9]|14:00))?$", + ) + .map2pe_id_error(ctx!())?; + if reg_exp.is_match(&self.0) { + Ok(self) + } else { + Err(PE::smart_contract_error( + &format!( + "{} does not comply with DEMOL pattern for Jahr xs:gYear", + field + ), + Vec::new(), + )) + } + } +} + +/******************************** +* DEMOL request related structs * +********************************/ + +#[derive(Deserialize, Serialize, Debug, Clone, Default, PartialEq, DemolCompatible)] +#[serde(rename_all = "PascalCase")] +pub struct DigitalerEichantrag { + // #[serde(rename = "$value")] + pub dokument: Dokument, + // #[serde(rename = "$value")] + pub antragsteller: Antragsteller, + pub geraete: Geraete, +} +#[derive(Deserialize, Serialize, Debug, 
Clone, Default, PartialEq, DemolCompatible)] +#[serde(rename_all = "PascalCase")] +pub struct Geraete { + #[serde(rename = "Geraet")] + pub inner: Vec<Geraet>, +} + +#[derive(Deserialize, Serialize, Debug, Clone, Default, PartialEq, Eq, DemolCompatible)] +#[serde(rename_all = "PascalCase")] +pub struct Dokument { + pub dateiname: Str100, + pub zeitstempel: Zeitstempel, + pub test: Bool, +} + +#[derive(Deserialize, Serialize, Debug, Clone, Default, PartialEq, Eq, DemolCompatible)] +#[serde(rename_all = "PascalCase")] +pub struct Antragsteller { + pub kuerzel: Str10, + pub kennung: Str10, + #[serde(skip_serializing_if = "Option::is_none", default)] + pub e_mail: Option<Str100>, + #[serde(skip_serializing_if = "Option::is_none", default)] + pub telefon: Option<Str20>, +} + +#[derive(Deserialize, Serialize, Debug, Clone, Default, PartialEq, DemolCompatible)] +#[serde(rename_all = "PascalCase")] +pub struct Geraet { + pub satz_i_d: UInt, + pub antragsjahr: Jahr, + #[serde(skip_serializing_if = "Option::is_none", default)] + pub eichkennzeichen: Option<Str50>, + #[serde(skip_serializing_if = "Option::is_none", default)] + pub bundesland: Option<Str2>, + #[serde(skip_serializing_if = "Option::is_none", default)] + pub dienstelle: Option<Str50>, + #[serde(skip_serializing_if = "Option::is_none", default)] + pub antragsteller: Option<Str100>, + #[serde(skip_serializing_if = "Option::is_none", default)] + pub termin: Option<Zeitstempel>, + #[serde(skip_serializing_if = "Option::is_none", default)] + pub termin_dauer: Option<Zeit>, + pub eichschein: Bool, + #[serde(skip_serializing_if = "Option::is_none", default)] + pub bestellnummer: Option<Str50>, + pub rechnungszustellung: Rechnungszustellung, + pub standort: Kunde, + #[serde(skip_serializing_if = "Option::is_none", default)] + pub rechnungsempfaenger: Option<Kunde>, + #[serde(skip_serializing_if = "Option::is_none", default)] + pub zustelladresse: Option<Kunde>, + pub geraeteidentifikation: GeraeteIdentifikation, + 
#[serde(rename = "Geraetebeschreibung")] + pub geraetebeschreibung: Geraetebeschreibung, +} + +#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] +pub struct Rechnungszustellung(pub Rechnungszustellungen); +#[derive(Serialize_repr, Deserialize_repr, Debug, Clone, PartialEq, Eq)] +#[repr(u8)] +pub enum Rechnungszustellungen { + #[serde(rename = "0")] + R0, + #[serde(rename = "1")] + R1, + #[serde(rename = "2")] + R2, + #[serde(rename = "3")] + R3, +} +impl Default for Rechnungszustellung { + fn default() -> Self { + Rechnungszustellung(Rechnungszustellungen::R0) + } +} +impl DEMOLCompatible for Rechnungszustellung { + fn try_compatible(self, _field: &str, _warnings: &mut Vec<PE>) -> Result<Self, PE> { + Ok(self) + } +} +#[derive(Deserialize, Serialize, Debug, Clone, Default, PartialEq, DemolCompatible)] +pub struct Geraetebeschreibung { + #[serde(rename = "$value")] + pub inner: Vec<GeraetebeschreibungVariants>, +} + +#[derive(Deserialize, Serialize, Debug, Clone, Default, PartialEq, DemolCompatible)] +#[serde(rename_all = "PascalCase")] +pub struct Kunde { + pub adresse: Adresse, + #[serde(skip_serializing_if = "Option::is_none", default)] + pub kontakt: Option<Kontakt>, +} + +#[derive(Deserialize, Serialize, Debug, Clone, Default, PartialEq, DemolCompatible)] +#[serde(rename_all = "PascalCase")] +pub struct Adresse { + pub name1: Str35, + #[serde(skip_serializing_if = "Option::is_none", default)] + pub name2: Option<Str27>, + #[serde(skip_serializing_if = "Option::is_none", default)] + pub name3: Option<Str40>, + #[serde(rename = "ISO_3166")] + pub iso_3166: Str2, + pub strasse: Str22, + #[serde(skip_serializing_if = "Option::is_none", default)] + pub hausnummer: Option<Str10>, + pub ort: Str40, + pub postleitzahl: Str10, + #[serde(skip_serializing_if = "Option::is_none", default)] + pub postfach: Option<Str10>, + #[serde(skip_serializing_if = "Option::is_none", default)] + pub ort_postfach: Option<Str40>, + #[serde(skip_serializing_if = 
"Option::is_none", default)] + pub p_l_z_postfach: Option<Str10>, + #[serde(skip_serializing_if = "Option::is_none", default)] + pub breitengrad: Option<Float>, + #[serde(skip_serializing_if = "Option::is_none", default)] + pub laengengrad: Option<Float>, + #[serde(skip_serializing_if = "Option::is_none", default)] + pub gemeindekennziffer: Option<UInt>, +} + +#[derive(Deserialize, Serialize, Debug, Clone, Default, PartialEq, Eq, DemolCompatible)] +#[serde(rename_all = "PascalCase")] +pub struct Kontakt { + pub ansprechpartner: Str100, + #[serde(skip_serializing_if = "Option::is_none", default)] + pub telefon: Option<Str20>, + #[serde(skip_serializing_if = "Option::is_none", default)] + pub fax: Option<Str20>, + #[serde(skip_serializing_if = "Option::is_none", default)] + pub mobiltelefon: Option<Str20>, + #[serde(skip_serializing_if = "Option::is_none", default)] + pub e_mail: Option<Str50>, +} + +#[derive(Deserialize, Serialize, Debug, Clone, Default, PartialEq, Eq, DemolCompatible)] +#[serde(rename_all = "PascalCase")] +pub struct GeraeteIdentifikation { + pub messgeraete_art: MessgeraeteArt, + pub hersteller: Str50, + pub typ: Str50, + pub identnummer: Str50, + pub kunden_identnummer: Str50, +} +#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] +pub struct MessgeraeteArt(pub MessgeraeteArten); +#[derive(Serialize_repr, Deserialize_repr, Debug, Clone, PartialEq, Eq)] +#[repr(u8)] +pub enum MessgeraeteArten { + MA0 = 0, // <!-- Ungültig --> + MA11 = 11, // <!-- 11: Waage (Klasse III oder IIII)--> + MA12 = 12, // <!-- 12: Waage (Klasse III oder IIII) für Kassensystem (POS) --> + MA13 = 13, // <!-- 13: PC für Kassensystem (POS)--> + MA14 = 14, // <!-- 14: Waage (Klasse III oder IIII) für Kassensystem (POS & PC)--> + MA15 = 15, // <!-- 15: Selbsttätige Waage--> + MA21 = 21, // <!-- 21: Kraftstoffzapfanlage ohne Mengenumwerter --> + MA22 = 22, // <!-- 22: Kraftstoffzapfanlage mit Mengenumwerter --> + MA23 = 23, // <!-- 23: Tankwagen --> + MA31 = 31, // 
<!-- 31: Abgasmessgerät für CO --> + MA32 = 32, // <!-- 32: Abgasmessgerät für CO, CO2, HC, O2 --> + MA33 = 33, // <!-- 33: Abgasmessgerät für Dieselruß --> + MA41 = 41, // <!-- 41: Reifenluftdruckmessgerät --> + MA42 = 42, // <!-- 42: Reifenluftdruckautomat --> + MA51 = 51, // <!-- 51: Taxameter --> + MA52 = 52, // <!-- 52: Wegstreckenzähler --> + MA99 = 99, // <!-- 99: Sonstige Messgeräte --> +} +impl From<i32> for MessgeraeteArten { + fn from(v: i32) -> Self { + match v { + x if x == Self::MA11 as StdInt => Self::MA11, + x if x == Self::MA12 as StdInt => Self::MA12, + x if x == Self::MA13 as StdInt => Self::MA13, + x if x == Self::MA14 as StdInt => Self::MA14, + x if x == Self::MA15 as StdInt => Self::MA15, + x if x == Self::MA21 as StdInt => Self::MA21, + x if x == Self::MA22 as StdInt => Self::MA22, + x if x == Self::MA23 as StdInt => Self::MA23, + x if x == Self::MA31 as StdInt => Self::MA31, + x if x == Self::MA32 as StdInt => Self::MA32, + x if x == Self::MA33 as StdInt => Self::MA33, + x if x == Self::MA41 as StdInt => Self::MA41, + x if x == Self::MA42 as StdInt => Self::MA42, + x if x == Self::MA51 as StdInt => Self::MA51, + x if x == Self::MA52 as StdInt => Self::MA52, + x if x == Self::MA99 as StdInt => Self::MA99, + _ => Self::MA0, + } + } +} +impl Default for MessgeraeteArt { + fn default() -> Self { + MessgeraeteArt(MessgeraeteArten::MA0) + } +} +impl DEMOLCompatible for MessgeraeteArt { + fn try_compatible(self, _field: &str, _warnings: &mut Vec<PE>) -> Result<Self, PE> { + Ok(self) + } +} + +#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq, DemolCompatible)] +#[serde(rename_all = "PascalCase")] +pub enum GeraetebeschreibungVariants { + // TODO: impl other GeraeteBeschreibung variants + // <xs:element name="Waage" type ="Typ_Geraet_Waage" /> + #[serde(rename = "Waage_POS")] + #[serde(rename_all = "PascalCase")] + WaagePOS { + kenngroesse: Kenngroesse, + #[serde(rename = "Pruefort_Messgeraet")] + pruefort_messgeraet: Str20, + }, + // 
<xs:element name="PC_POS" type ="Typ_Geraet_PC_POS" /> + // <xs:element name="Waage_PC_POS" type ="Typ_Geraet_Waage_PC_POS" /> + // <xs:element name="Zapfpunkt" type ="Typ_Geraet_Zapfpunkt" /> + // <xs:element name="Tankwagen" type ="Typ_Geraet_Tankwagen" /> + // <xs:element name="Abgasmessgeraet" type ="Typ_Geraet_Abgasmessgeraet" /> + // <xs:element name="Reifenluftdruck" type ="Typ_Geraet_Reifenluftdruck" /> + // <xs:element name="Taxameter" type ="Typ_Geraet_Taxameter" /> + // <xs:element name="Wegstreckenzaehler" type ="Typ_Geraet_WSZ" /> + // <xs:element name="Sonstige_Messgeraete" type ="Typ_Geraet_Sonstige" /> +} +impl Default for GeraetebeschreibungVariants { + fn default() -> Self { + GeraetebeschreibungVariants::WaagePOS { + kenngroesse: Kenngroesse::default(), + pruefort_messgeraet: Str20::default(), + } + } +} + +#[derive(Deserialize, Serialize, Debug, Clone, Default, PartialEq, Eq)] +pub struct Kenngroesse(pub String); +impl DEMOLCompatible for Kenngroesse { + fn try_compatible(self, field: &str, _warnings: &mut Vec<PE>) -> Result<Self, PE> { + let reg_exp = + regex::Regex::new("^([0-9]){1,5}(,[0-9]){0,1} (g|kg)$").map2pe_id_error(ctx!())?; + if reg_exp.is_match(&self.0) { + Ok(self) + } else { + Err(PE::smart_contract_error(&format!("{} does not comply with DEMOL pattern for Kenngroesse [([0-9]){{1,5}}(\\,[0-9]){{0,1}} (g|kg)]", field), Vec::new())) + } + } +} + +/******************************** +* DEMOL reply related structs * +********************************/ +#[derive(Deserialize, Serialize, Debug, Clone, Default, PartialEq, Eq)] +pub struct DeaReply { + #[serde(rename = "type")] + pub ty: String, + pub status: StdInt, + pub title: String, + pub detail: String, + #[serde(default)] + #[serde(rename = "invalid-params")] + pub invalid_params: Vec<InvalidParam>, +} + +#[derive(Deserialize, Serialize, Debug, Clone, Default, PartialEq, Eq)] +pub struct InvalidParam { + #[serde(rename = "recordId")] + pub record_id: StdInt, + pub reason: String, +} 
+#[derive(Deserialize, Serialize, Debug, Clone, Default, PartialEq, Eq, DemolCompatible)] +#[serde(rename_all = "PascalCase")] +#[serde(rename = "DigitalerEichantrag_Antwort")] +pub struct DigitalerEichantragAntwort { + pub dokument: DokumentReply, + pub geraete: GeraeteReply, +} + +#[derive(Deserialize, Serialize, Debug, Clone, Default, PartialEq, Eq, DemolCompatible)] +#[serde(rename_all = "PascalCase")] +pub struct DokumentReply { + pub dateiname: Str100, + pub zeitstempel: Zeitstempel, +} + +#[derive(Deserialize, Serialize, Debug, Clone, Default, PartialEq, Eq, DemolCompatible)] +#[serde(rename_all = "PascalCase")] +pub struct GeraeteReply { + #[serde(rename = "Geraet")] + pub inner: Vec<GeraetReply>, +} + +#[derive(Deserialize, Serialize, Debug, Clone, Default, PartialEq, Eq, DemolCompatible)] +#[serde(rename_all = "PascalCase")] +pub struct GeraetReply { + pub satz_i_d: UInt, + pub antragsjahr: Jahr, + pub bestellnummer: Str50, + pub ergebnis: Ergebnis, + pub zustaendiges_bundesland: ZustaendigesBundesland, + pub zustaendiges_eichamt: ZustaendigesEichamt, +} + +#[derive(Deserialize, Serialize, Debug, Clone, Default, PartialEq, Eq)] +pub struct Ergebnis(pub String); +impl DEMOLCompatible for Ergebnis { + fn try_compatible(self, _field: &str, _warnings: &mut Vec<PE>) -> Result<Self, PE> { + Ok(self) + } +} +#[derive(Deserialize, Serialize, Debug, Clone, Default, PartialEq, Eq)] +pub struct ZustaendigesBundesland(pub String); +impl DEMOLCompatible for ZustaendigesBundesland { + fn try_compatible(self, _field: &str, _warnings: &mut Vec<PE>) -> Result<Self, PE> { + Ok(self) + } +} +#[derive(Deserialize, Serialize, Debug, Clone, Default, PartialEq, Eq)] +pub struct ZustaendigesEichamt(pub String); +impl DEMOLCompatible for ZustaendigesEichamt { + fn try_compatible(self, _field: &str, _warnings: &mut Vec<PE>) -> Result<Self, PE> { + Ok(self) + } +} diff --git a/src/backend/utils/mod.rs b/src/backend/utils/mod.rs new file mode 100644 index 
0000000000000000000000000000000000000000..bd0766636985b7fc42487a7c8c6cc9f7b45dafdb --- /dev/null +++ b/src/backend/utils/mod.rs @@ -0,0 +1,11 @@ +/// everything to interact with demol +pub mod demol; +/// helper for the methods from opcua +pub mod opcua; + +pub mod rest_http; + +// /// filter for methods from index +// pub mod filters; +// /// helper for the methods from index +// pub mod index; diff --git a/src/backend/utils/opcua.rs b/src/backend/utils/opcua.rs new file mode 100644 index 0000000000000000000000000000000000000000..ec1c5d74b4713b7c73480f4e5f1741f41160d714 --- /dev/null +++ b/src/backend/utils/opcua.rs @@ -0,0 +1,202 @@ +use std::process::Stdio; +use tokio::process::Command; + +use crate::backend::types::opcua::*; +use crate::db::types::io_types::*; +use backend_lib::init::BackendConfig; +use dpsfw::api_method::RawRequest; +use dpsfw::dp_utils::clienting::{perform_client_request, ServiceDestination}; +use dpsfw::http::Method; +use dpsfw_types::config::GlobalConfigured; +use dpsfw_types::ctx; +use dpsfw_types::dp::comm_link::{CommLink, LinkContentEncoding}; +use dpsfw_types::dp::services::NodeService; +use dpsfw_types::error::PE; +use utils::log::{log_enabled, Level}; +use utils::{asynctor, ciddebug, ciderror}; + +/// Creates and session and connects to the url via opcua method 'connect_and_activate' +pub async fn start_process(call: OPCUACall, token: Vec<u8>, errors: &mut Vec<PE>) -> OPCUAResult { + // take the mcuid from the call + let id = match &call { + OPCUACall::Method(r) => r.mcudid.clone(), + OPCUACall::Get(r) => r.mcudid.clone(), + }; + + // prepare input for, and execute get_mi_data method to receive endpoint link from db + let get_mi_data_input = vec![id]; + let mut request = + RawRequest::new_from_struct(get_mi_data_input, Method::POST, "/mc/mid/get_data/"); + request.token = token; + let mut get_mi_result = GetMiDataOutput::default(); + if let Err(_error) = asynctor!(cid, perform_client_request; &request, 
&ServiceDestination::Internal(NodeService::DB, None), &mut get_mi_result, errors) + { + return OPCUAResult::Error("Failed to receive response from local database.".to_string()); + } + + // check if output from get_mi_data is correct + if get_mi_result.is_empty() { + errors.push( + PE::opcua_connection_error("measuring instrument not in database").id_warn(ctx!()), + ); + return OPCUAResult::Ok("".to_string()); + } + let mi_data = &get_mi_result[0].data; + + // take the endpoint from the database and all other values from the ini + let endpoint = match ( + mi_data.mi_link_enc.get_first_val(), + mi_data.mi_link.get_first_val(), + ) { + // TODO: What if there is dissens? + (Some(LinkContentEncoding::Opcua), Some(CommLink(conn_str))) => conn_str, + (Some(_), Some(_)) => { + return OPCUAResult::Error("Got non-opcua link for opcua-connector".to_string()) + } + (Some(_), None) => { + errors.push( + PE::opcua_connection_error("measuring instrument has no 'mi_link'") + .id_debug(ctx!()), + ); + return OPCUAResult::Ok("".to_string()); + } + (None, Some(_)) => { + errors.push( + PE::opcua_connection_error("measuring instrument has no 'mi_link_enc'") + .id_debug(ctx!()), + ); + return OPCUAResult::Ok("".to_string()); + } + (None, None) => { + errors.push( + PE::opcua_connection_error( + "measuring instrument has neither 'mi_link' nor 'mi_link_enc'", + ) + .id_debug(ctx!()), + ); + return OPCUAResult::Ok("".to_string()); + } + }; + + // let endpoint = &mi_data.mi_link; + let port = endpoint + .port() + .unwrap_or_else(|| BackendConfig::get_global().opcua_port) + .to_string(); + let data = match serde_json::to_string(&call) { + Ok(s) => s, + Err(_e) => { + return OPCUAResult::Error( + "Failed to convert the OPCUACall to a json string".to_string(), + ) + } + }; + let path_opcua = &BackendConfig::get_global().opcua_client_bin; + let path_debug = &BackendConfig::get_global().opcua_client_debug; + let timeout = BackendConfig::get_global().opcua_timeout.to_string(); + + // build 
arguments for the opcua client + let mut args = vec![ + "-a", + endpoint.host_str().unwrap_or_default(), + "-p", + &port, + "-d", + &data, + "-t", + &timeout, + ]; + if log_enabled!(Level::Debug) { + args.push("-v"); + args.push(path_debug); + } + + ciddebug!("Starting opcua with command line arguments: '{:?}'", args); + + // execute opcua client + let opcua_process = Command::new(path_opcua) + .args(&args) + .stdout(Stdio::piped()) + .stderr(Stdio::piped()) + .spawn() + .unwrap(); + + // wait until process finished + let res = match opcua_process.wait_with_output().await { + Ok(res) => res, + Err(e) => { + ciderror!("opcua failed to wait for the opcua_client to finish, {}", e); + return OPCUAResult::Error( + "opcua failed to wait for the opcua_client to finish".to_string(), + ); + } + }; + + if !res.status.success() { + let mesg = format!("opcua exited with StatusCode {:?}", res.status.code()); + ciderror!("{}\n{}", mesg, String::from_utf8_lossy(&res.stderr)); + return OPCUAResult::Error(mesg); + } + + // check and parse response + // let mut response = String::new(); + let response = match String::from_utf8(res.stdout) { + Ok(s) => s, + Err(e) => { + ciderror!( + "opcua failed to parse the stdout from the opcua_client process {}", + e + ); + return OPCUAResult::Error( + "opcua failed to parse the stdout from the opcua_client process".to_string(), + ); + } + }; + // if let Err(e) = output.read_to_string(&mut response) { + // error!("Failed to read the response from the opcua_client, {}", e); + // return OPCUAResult::Error("Failed to read the response from the opcua_client".to_string()); + // } + + ciddebug!("opcua_client returned: '{}'", &response); + + for line in response.split('\n') { + ciddebug!("Parsing line: {}", &line); + if line.starts_with("FINISHED") { + match line.get(9..) 
{ + Some(s) => { + ciddebug!("Response from opcua client: {}", s); + return OPCUAResult::Ok(s.to_string()); + } + None => { + ciderror!("Failed to parse the response from the opcua_client"); + return OPCUAResult::Error( + "Failed to parse the response from the opcua_client".to_string(), + ); + } + } + } else if line.starts_with("ERROR") { + match line.get(6..) { + Some(s) => match s { + "TIMEOUT" => { + return OPCUAResult::Error("opcua client threw timeout".to_string()) + } + _ => { + return OPCUAResult::Error( + "opcua client threw an error, check client".to_string(), + ) + } + }, + None => { + ciderror!("Failed to parse the response from the opcua_client"); + return OPCUAResult::Error( + "Failed to parse the response from the opcua_client".to_string(), + ); + } + } + } + } + + OPCUAResult::Error( + "client did not return a 'FINISHED' or 'ERROR', check implementation".to_string(), + ) +} diff --git a/src/backend/utils/rest_http.rs b/src/backend/utils/rest_http.rs new file mode 100644 index 0000000000000000000000000000000000000000..53137cfbc132f16158b5cddb9ea6d42835aa4a82 --- /dev/null +++ b/src/backend/utils/rest_http.rs @@ -0,0 +1,108 @@ +use dpsfw::http::{Method, StatusCode}; +use dpsfw::{api_method::RawRequest, dp_utils::clienting::perform_client_request}; +use dpsfw_types::{ + dp::{ + comm_link::{CommLink, LinkContentEncoding}, + uids::uri::MimeType, + }, + error::PE, + EmptyIOData, +}; +use serde::Serialize; +use utils::asynctor; + +use crate::backend::types::mi_comm::{ + SoftwareHashInput, SoftwareHashOutput, SoftwareVersionOutput, StartUpdateInput, + StartUpdateOutput, +}; + +pub(crate) async fn get_hash( + encoding: &LinkContentEncoding, + link: &CommLink, + errors: &mut Vec<PE>, + input: SoftwareHashInput, +) -> Result<String, PE> { + let (raw_input, mime) = data2serialize(encoding, input)?; + + let mut request = RawRequest::new_empty(); + request.method = Method::POST; + request.url = "/software_hash/".to_string(); + request.mime_type = mime; + 
request.raw_input = raw_input.into(); + let service = link.try_into()?; + let mut output = SoftwareHashOutput::default(); + + asynctor!(cid, perform_client_request;&request, &service, &mut output, errors)?; + + Ok(output.software_hash) +} + +pub(crate) async fn start_update( + encoding: &LinkContentEncoding, + link: &CommLink, + errors: &mut Vec<PE>, + input: StartUpdateInput, +) -> Result<StatusCode, PE> { + let (raw_input, mime) = data2serialize(encoding, input)?; + + let mut request = RawRequest::new_empty(); + request.method = Method::POST; + request.url = "/start_update/".to_string(); + request.mime_type = mime; + request.raw_input = raw_input.into(); + let service = link.try_into()?; + let mut output = StartUpdateOutput::default(); + + asynctor!(cid, perform_client_request;&request, &service, &mut output, errors)?; + + match output.status { + _ => Ok(StatusCode::OK), + } +} + +pub async fn get_version( + encoding: &LinkContentEncoding, + link: &CommLink, + errors: &mut Vec<PE>, +) -> Result<String, PE> { + let input = EmptyIOData {}; // it's a GET-request so empty input body + let (raw_input, mime) = data2serialize(encoding, input)?; + + let mut request = RawRequest::new_empty(); + request.method = Method::GET; + request.url = "/software_version/".to_string(); + request.mime_type = mime; + request.raw_input = raw_input.into(); + let service = link.try_into()?; + let mut output = SoftwareVersionOutput::default(); + + asynctor!(cid, perform_client_request;&request, &service, &mut output, errors)?; + + Ok(output.installed_version) +} + +fn data2serialize<T: Serialize>( + encoding: &LinkContentEncoding, + input: T, +) -> Result<(String, MimeType), PE> { + let (body, mime) = match encoding { + LinkContentEncoding::Xml => ( + quick_xml::se::to_string(&input).unwrap_or_default(), + MimeType::XML, + ), + LinkContentEncoding::Json => ( + serde_json::to_string(&input).unwrap_or_default(), + MimeType::JSON, + ), + _ => { + return Err(PE::external_connection_error( + 
format!( + "Can not connect to {:?}-encoded link via http-connector", + encoding + ), + Vec::new(), + )) + } + }; + Ok((body, mime)) +} diff --git a/src/db/init.rs b/src/db/init.rs new file mode 100644 index 0000000000000000000000000000000000000000..bcce99e867a0a8a6627d46fd2edbabd345ab1b59 --- /dev/null +++ b/src/db/init.rs @@ -0,0 +1,44 @@ +use db_lib::init::{SchemaData, UidSchemaMap}; +pub use dpsfw::init::Initing; +use dpsfw_types::db::db_types::DBDerived; +use dpsfw_types::error::PE; +use utils::cidinfo; +use utils::ini::Ini; + +pub use async_trait::async_trait; +use std::collections::HashMap; + +use super::types::io_types::{MiDataIO, SeriesDataIO}; + +/// backend init functions +pub struct McDbInit {} +#[async_trait] +impl Initing for McDbInit { + async fn init(self, _ini: &Ini) -> Result<(), PE> { + cidinfo!("Initing with MC-Schema {}", crate::MC_SCHEMA_VERSION); + Ok(()) + } +} + +impl McDbInit { + pub fn get_uid_schema_map() -> UidSchemaMap { + let mut mc_uids_schema_map = HashMap::new(); + mc_uids_schema_map.insert( + crate::mc::uids::MCUD_ID, + SchemaData { + schema: super::MC_SCHEMA, + main_table: MiDataIO::TABLE, + pk_col: MiDataIO::PK.expect("IndexTable has no Primary Key"), + }, + ); + mc_uids_schema_map.insert( + crate::mc::uids::MCUS_ID, + SchemaData { + schema: super::MC_SCHEMA, + main_table: SeriesDataIO::TABLE, + pk_col: SeriesDataIO::PK.expect("IndexTable has no Primary Key"), + }, + ); + mc_uids_schema_map + } +} diff --git a/src/db/methods/mid.rs b/src/db/methods/mid.rs new file mode 100644 index 0000000000000000000000000000000000000000..3567e62410b1f8e4e6268001a29811536c48795c --- /dev/null +++ b/src/db/methods/mid.rs @@ -0,0 +1,524 @@ +//! 
Methods regarding measuring_instruments + +use async_trait::async_trait; +use std::collections::{HashMap, HashSet}; + +use crate::db::types::io_types::*; +use crate::mc::uids::{MCUDID, MCUSID}; +use db_lib::db_utils::connector::*; +use db_lib::db_utils::query::DpQuery; +use db_lib::db_utils::translator::{PGDerivedTranslator, SilentTranslator, SingleColumnTranslator}; +use db_lib::db_utils::*; +use db_lib::init::DbConfig; +use dpsfw::api_method::{Processing, ProcessingReply, RawRequest, SelfDeclaration}; +use dpsfw::dp_utils::clienting::{perform_client_request, ServiceDestination}; +use dpsfw::http::{Method, StatusCode}; +use dpsfw::processing_meta_store::ProcessingMetaStore; +use dpsfw::self_declare; +use dpsfw_types::config::GlobalConfigured; +use dpsfw_types::db::io_types::*; +use dpsfw_types::dp::{ + services::NodeService, + uids::{ULID, UPID}, +}; +use dpsfw_types::error::PE; +use dpsfw_types::merge::MergeLeaf; +use dpsfw_types::{EmptyIOData, StdInt}; +use utils::asynctor; + +/**************************************************************************************************/ +self_declare!(GetAllMethod, comments:r#" +try to get all accessible mcudids +"#); +#[async_trait] +impl Processing for GetAllMethod { + type Input = EmptyIOData; + type Output = Vec<MCUDID>; + async fn execute_call( + &self, + _request: &RawRequest, + _input: &Self::Input, + output: &mut Self::Output, + errors: &mut Vec<PE>, + pms: &mut ProcessingMetaStore, + ) -> ProcessingReply { + //execute query + let db = &DbConfig::get_global().pg_database_name; + let mut query = DpQuery::new_select(MiDataIO::TABLE, vec![MiDataIO::try_pk()?]); + let result: Vec<MCUDID> = perform_sql_query_once( + &mut query, + &SingleColumnTranslator {}, + errors, + db, + &get_username_from_id(&pms.local_access_group), + ) + .await?; + + check_vec_not_empty(&result)?; + *output = result; + Ok(StatusCode::OK) + } +} + +/**************************************************************************************************/ 
+self_declare!(GetAllBySeriesMethod, comments:r#" +try to get all accessible mcudids belonging to a certain series +"#); +#[async_trait] +impl Processing for GetAllBySeriesMethod { + type Input = MCUSID; + type Output = Vec<MCUDID>; + async fn execute_call( + &self, + _request: &RawRequest, + input: &Self::Input, + output: &mut Self::Output, + errors: &mut Vec<PE>, + pms: &mut ProcessingMetaStore, + ) -> ProcessingReply { + //execute query + let db = &DbConfig::get_global().pg_database_name; + let username = get_username_from_id(&pms.local_access_group); + let mut client = connect_async_to_db(db, &username).await?; + let mut query = DpQuery::new_select(MiDataIO::TABLE, vec![MiDataIO::try_pk()?]); + let cond_params = vec![vec![Box::new(input) as PgParam]]; + query.set_condition(cond_params, "mcusid = ${}")?; + let result: Vec<MCUDID> = exec_and_translate_sql_query_once( + &mut client, + &mut query, + &SingleColumnTranslator {}, + errors, + ) + .await?; + + check_vec_not_empty(&result)?; + *output = result; + Ok(StatusCode::OK) + } +} + +/**************************************************************************************************/ +self_declare!(GetDataMethod, comments:r#" +returns the data of the MIs of the mcudids listed in the input object +"#); +#[async_trait] +impl Processing for GetDataMethod { + type Input = Vec<MCUDID>; + type Output = GetMiDataOutput; + #[allow(clippy::ptr_arg)] + async fn execute_call( + &self, + _request: &RawRequest, + input: &Self::Input, + output: &mut Self::Output, + errors: &mut Vec<PE>, + pms: &mut ProcessingMetaStore, + ) -> ProcessingReply { + if input.is_empty() { + errors.push(PE::malformed_input("Input vector of MCUDIDs was empty.")); + return Ok(StatusCode::BAD_REQUEST); + } + let db = &DbConfig::get_global().pg_database_name; + let username = get_username_from_id(&pms.local_access_group); + let mut client = connect_async_to_db(db, &username).await?; + + //todo check if one join query would be quicker, in that case define 
new rust struct that contains all data fields and perform one giant join statement. + //get mi data + let cond_params = vec![vec![Box::new(input) as PgParam]]; + let cols = get_applicable_columns4user(MiDataIO::TABLE, 'R', &mut client).await?; + let mut query = DpQuery::new_select(MiDataIO::TABLE, cols); + query.set_condition(cond_params, "mcudid = ANY(${})")?; + let vec_mi_data: Vec<MiDataIO> = exec_and_translate_sql_query_once( + &mut client, + &mut query, + &PGDerivedTranslator {}, + errors, + ) + .await?; + let mut mi_data_map = vec_mi_data + .into_iter() + .map(|data| (data.mcudid.clone(), data)) + .collect::<HashMap<MCUDID, MiDataIO>>(); + + // extract mcusids from the mcudids + // get product series data + let mcusids = input + .iter() + .map(|mcudid| MCUSID::from(mcudid.clone())) + .collect::<HashSet<MCUSID>>(); // fix source of mcusid should be mcudid + let mcusids: Vec<MCUSID> = mcusids.into_iter().collect(); + let cond_params = vec![vec![Box::new(mcusids.clone()) as PgParam]]; + let cols = get_applicable_columns4user(SeriesDataIO::TABLE, 'R', &mut client).await?; + let mut query = DpQuery::new_select(SeriesDataIO::TABLE, cols); + query.set_condition(cond_params, "mcusid = ANY(${})")?; + let vec_series_data: Vec<SeriesDataIO> = exec_and_translate_sql_query_once( + &mut client, + &mut query, + &PGDerivedTranslator {}, + errors, + ) + .await + .unwrap_or_else(|e| { + errors.push(e); + Vec::new() + }); + + // if no series data is available and no mi data was found, return with error + if vec_series_data.is_empty() { + return Ok(StatusCode::OK); + }; + + // transform into map + let series_map = vec_series_data + .into_iter() + .map(|data| (data.mcusid.clone(), data)) + .collect::<HashMap<MCUSID, SeriesDataIO>>(); + + // extract device_type ids + let mut device_data_map: HashMap<StdInt, MIDeviceType> = HashMap::new(); + let device_type_ids = series_map + .iter() + .filter_map(|(_, data)| data.mid_type.get_first_val().copied()) + .collect::<HashSet<StdInt>>(); 
+ let device_type_ids: Vec<StdInt> = device_type_ids.into_iter().collect(); + if !device_type_ids.is_empty() { + // get device type data + let cond_params = vec![vec![Box::new(&device_type_ids) as PgParam]]; + let cols = get_applicable_columns4user(MIDeviceType::TABLE, 'R', &mut client).await?; + let mut query = DpQuery::new_select(MIDeviceType::TABLE, cols); + query.set_condition(cond_params, "mid_type_id = ANY(${})")?; + let vec_device_data: Vec<MIDeviceType> = exec_and_translate_sql_query_once( + &mut client, + &mut query, + &PGDerivedTranslator {}, + errors, + ) + .await?; + + // transform into hashmap for later use + device_data_map = vec_device_data + .into_iter() + .map(|data| (data.mid_type_id, data)) + .collect::<HashMap<_, _>>(); + } + + // extract series file data + // get vector of all files with corresponding mcusid + let f_cols = FileData::get_cols().join(",f."); + // TODO: replace tablenames + let query_cmd = format!("SELECT sf.uid AS uid, sf.file_id, f.{}, ft.file_type_desc FROM (SELECT uid, file_id FROM mc_mcusid_files WHERE uid = ANY($1)) sf INNER JOIN base_files f ON sf.file_id = f.file_id INNER JOIN base_file_types ft ON ft.file_type_id = f.file_type_id;",f_cols); + let query_cond = vec![vec![Box::new(mcusids) as PgParam]]; + let mut query = DpQuery::new_raw(query_cond, &query_cmd)?; + let vec_series_files: Vec<FileDataIO<MCUSID>> = exec_and_translate_sql_query_once( + &mut client, + &mut query, + &PGDerivedTranslator {}, + errors, + ) + .await + .unwrap_or_else(|e| { + errors.push(e); + Vec::new() + }); + + // transform into hashmap + let mut series_file_map: HashMap<MCUSID, Vec<MergeLeaf<FileDataIO<MCUSID>>>> = + HashMap::new(); + for s_file in vec_series_files { + if series_file_map.contains_key(&s_file.uid) { + series_file_map + .get_mut(&s_file.uid) + .unwrap() + .push(MergeLeaf::with_current_node(s_file)); + } else { + series_file_map.insert( + s_file.uid.clone(), + vec![MergeLeaf::with_current_node(s_file)], + ); + } + } + + // extract 
device file data + let f_cols = FileData::get_cols().join(",f."); + // TODO: replace tablenames + let query_cmd = format!("SELECT df.uid AS uid, df.file_id, f.{}, ft.file_type_desc FROM (SELECT uid, file_id FROM mc_mcudid_files WHERE uid = ANY($1)) df INNER JOIN base_files f ON df.file_id = f.file_id INNER JOIN base_file_types ft ON ft.file_type_id = f.file_type_id;",f_cols); + let query_cond = vec![vec![Box::new(input.clone()) as PgParam]]; + let mut query = DpQuery::new_raw(query_cond, &query_cmd)?; + let vec_device_files: Vec<FileDataIO<MCUDID>> = exec_and_translate_sql_query_once( + &mut client, + &mut query, + &PGDerivedTranslator {}, + errors, + ) + .await + .unwrap_or_else(|e| { + errors.push(e); + Vec::new() + }); + + // transform into hashmap + + let mut device_file_map: HashMap<MCUDID, Vec<MergeLeaf<FileDataIO<MCUDID>>>> = + HashMap::new(); + for d_file in vec_device_files { + if device_file_map.contains_key(&d_file.uid) { + device_file_map + .get_mut(&d_file.uid) + .unwrap() + .push(MergeLeaf::with_current_node(d_file)); + } else { + device_file_map.insert( + d_file.uid.clone(), + vec![MergeLeaf::with_current_node(d_file)], + ); + } + } + + // extract persons data + // get vector of all persons with corresponding upid + let mut person_data_map: HashMap<UPID, Person> = HashMap::new(); + let upids = mi_data_map + .iter() + .filter_map(|(_, data)| data.product_owner.get_first_val().cloned()) + .collect::<HashSet<UPID>>(); + let upids: Vec<UPID> = upids.into_iter().collect(); + if !upids.is_empty() { + // get device type data + let cond_params = vec![vec![Box::new(&upids) as PgParam]]; + let cols = get_applicable_columns4user(Person::TABLE, 'R', &mut client).await?; + let mut query = DpQuery::new_select(Person::TABLE, cols); + query.set_condition( + cond_params, + &format!( + "{} = ANY(${{}})", + Person::PK.ok_or_else(|| PE::database_error(format!( + "no primary key defined for {}", + Person::TABLE + )))? 
+ ), + )?; + let vec_person_data: Vec<Person> = exec_and_translate_sql_query_once( + &mut client, + &mut query, + &PGDerivedTranslator {}, + errors, + ) + .await?; + + // transform into hashmap for later use + person_data_map = vec_person_data + .into_iter() + .map(|data| (data.person_id.clone(), data)) + .collect::<HashMap<_, _>>(); + } + + //TODO: + // extract locations data + // get vector of all locations with corresponding ulid + let mut location_data_map: HashMap<ULID, Location> = HashMap::new(); + let ulids = mi_data_map + .iter() + .filter_map(|(_, data)| data.location.get_first_val().cloned()) + .collect::<HashSet<ULID>>(); + let ulids: Vec<ULID> = ulids.into_iter().collect(); + if !ulids.is_empty() { + // get device type data + let cond_params = vec![vec![Box::new(&ulids) as PgParam]]; + let cols = get_applicable_columns4user(Location::TABLE, 'R', &mut client).await?; + let mut query = DpQuery::new_select(Location::TABLE, cols); + query.set_condition( + cond_params, + &format!( + "{} = ANY(${{}})", + Location::PK.ok_or_else(|| PE::database_error(format!( + "no primary key defined for {}", + Location::TABLE + )))? 
+ ), + )?; + let vec_location_data: Vec<Location> = exec_and_translate_sql_query_once( + &mut client, + &mut query, + &PGDerivedTranslator {}, + errors, + ) + .await?; + + // transform into hashmap for later use + location_data_map = vec_location_data + .into_iter() + .map(|data| (data.location_id.clone(), data)) + .collect::<HashMap<_, _>>(); + } + + // build the digital representation of each MI + let mut result: Vec<DBDigRep> = Vec::new(); + for mcudid in input { + let mcusid = MCUSID::from(mcudid.clone()); + + let series_data = series_map.get(&mcusid).cloned(); + // .unwrap_or_else(|| panic!("No series data in DB for mcusid: {:?}.", &mi.mcusid)); + let mut md = mi_data_map.remove(mcudid).unwrap_or_default(); + md.mcudid = mcudid.clone(); + let device_type_data = series_data + .as_ref() + .and_then(|sd| { + sd.mid_type + .get_first_val() + .map(|mid_type| device_data_map.get(mid_type).cloned()) + }) + .flatten() + .into(); + let mut sd = series_data.unwrap_or_default(); + sd.mcusid = mcusid; + let series_files = series_file_map + .get(&sd.mcusid) + .map(|s_f| s_f.clone()) + .unwrap_or_default(); + let device_files = device_file_map + .get(mcudid) + .map(|d_f| d_f.clone()) + .unwrap_or_default(); + + // TODO: get Locations and person + let mut persons_data: Vec<MergeLeaf<Person>> = Default::default(); + persons_data.push( + md.product_owner + .get_first_val() + .and_then(|product_owner| { + person_data_map.get(product_owner).map(|p_d| p_d.clone()) + }) + .unwrap_or_default() + .into(), + ); + let mut locations_data: Vec<MergeLeaf<Location>> = Default::default(); + locations_data.push( + md.location + .get_first_val() + .and_then(|location| location_data_map.get(location).map(|p_d| p_d.clone())) + .unwrap_or_default() + .into(), + ); + + let result_dig_rep = DBDigRep { + data: md, + series_data: sd, + device_type_data: device_type_data, + persons_data, + locations_data, + series_files, + device_files, + }; + + result.push(result_dig_rep); + } + + if 
result.is_empty() { + return Ok(StatusCode::OK); + }; + *output = result; + Ok(StatusCode::OK) + } +} + +/**************************************************************************************************/ +self_declare!(UpdateDataMethod, comments:r#" +update data for given mcudid +"#); +#[async_trait] +impl Processing for UpdateDataMethod { + type Input = MiDataIO; + type Output = EmptyIOData; + async fn execute_call( + &self, + _request: &RawRequest, + input: &Self::Input, + _output: &mut Self::Output, + errors: &mut Vec<PE>, + pms: &mut ProcessingMetaStore, + ) -> ProcessingReply { + let db = &DbConfig::get_global().pg_database_name; + let access_user = get_username_from_id(&pms.local_access_group); + let mut client = connect_async_to_db(db, &access_user).await?; + + let params = [input.clone()]; + // let cond_params = vec![vec![&input.mcudid as &(dyn ToSql + Sync + Send)]]; + let cols = get_applicable_columns4user(MiDataIO::TABLE, 'W', &mut client).await?; + let mut query = + DpQuery::new_upsert(¶ms, MiDataIO::TABLE, None, cols, MiDataIO::try_pk()?)?; + // query.set_condition(cond_params.as_slice(), "mcudid = ${}")?; + exec_and_translate_sql_query_once(&mut client, &mut query, &SilentTranslator {}, errors) + .await?; + + Ok(StatusCode::OK) + } +} + +/**************************************************************************************************/ +self_declare!(InsertDataMethod, comments:r#" +insert data into measuring_instruments table +"#); +#[async_trait] +impl Processing for InsertDataMethod { + type Input = AddMiDataInput; + type Output = EmptyIOData; + async fn execute_call( + &self, + _request: &RawRequest, + input: &Self::Input, + _output: &mut Self::Output, + errors: &mut Vec<PE>, + pms: &mut ProcessingMetaStore, + ) -> ProcessingReply { + let db = &DbConfig::get_global().pg_database_name; + let access_user = get_username_from_id(&pms.local_access_group); + let mut client = connect_async_to_db(db, &access_user).await?; + let params = input.clone(); + 
let cols = get_applicable_columns4user(MiDataIO::TABLE, 'W', &mut client).await?; + let mut query = DpQuery::new_insert(¶ms, MiDataIO::TABLE, None, cols)?; + exec_and_translate_sql_query(&mut client, &mut query, &SilentTranslator {}, errors).await?; + + // start notify all nodes of concerned orgs and sht + let notify_all_request = + RawRequest::new_from_struct(None::<StdInt>, Method::POST, "/index/notify_nodes/"); + let _ = asynctor!(cid, perform_client_request; ¬ify_all_request, &ServiceDestination::Internal(NodeService::ARM, None), &mut EmptyIOData{}, errors).map_err(|e| errors.push(e)); + + Ok(StatusCode::OK) + } +} +/**************************************************************************************************/ +self_declare!(DeleteDataMethod, comments:r#" +delete data from measuring_instruments table, will error if there are ForeignKeys to this +"#); +#[async_trait] +impl Processing for DeleteDataMethod { + type Input = MCUDID; + type Output = EmptyIOData; + async fn execute_call( + &self, + _request: &RawRequest, + input: &Self::Input, + _output: &mut Self::Output, + errors: &mut Vec<PE>, + pms: &mut ProcessingMetaStore, + ) -> ProcessingReply { + // get user name + let db = &DbConfig::get_global().pg_database_name; + let access_group = get_username_from_id(&pms.local_access_group); + let mut client = connect_async_to_db(db, &access_group).await?; + + // delete from measuring_instruments + let mut query = DpQuery::new_delete(MiDataIO::TABLE); + let cond_params = vec![vec![Box::new(input) as PgParam]]; + query.set_condition(cond_params, "mcudid = ${}")?; + exec_and_translate_sql_query_once(&mut client, &mut query, &SilentTranslator {}, errors) + .await?; + + // start notify all nodes of concerned orgs and sht + let notify_all_request = + RawRequest::new_from_struct(None::<StdInt>, Method::POST, "/index/notify_nodes/"); + let _ = asynctor!(cid, perform_client_request; ¬ify_all_request, &ServiceDestination::Internal(NodeService::ARM, None), &mut 
EmptyIOData{}, errors).map_err(|e| errors.push(e)); + + Ok(StatusCode::OK) + } +} diff --git a/src/db/methods/mod.rs b/src/db/methods/mod.rs new file mode 100644 index 0000000000000000000000000000000000000000..15e6e4d436596086ad9fd61ecb10b3bcc916a6b4 --- /dev/null +++ b/src/db/methods/mod.rs @@ -0,0 +1,4 @@ +// Methods regarding measuring_instruments +pub mod mid; +// Methods regarding product_series +pub mod series; diff --git a/src/db/methods/series.rs b/src/db/methods/series.rs new file mode 100644 index 0000000000000000000000000000000000000000..aaed0f5345401950b3dc384e5c48b071bc9493a1 --- /dev/null +++ b/src/db/methods/series.rs @@ -0,0 +1,334 @@ +//! Methods regarding product_series + +use async_trait::async_trait; +use std::collections::HashMap; +use std::collections::HashSet; + +use crate::db::types::io_types::*; +use crate::mc::uids::MCUSID; +use db_lib::db_utils::connector::*; +use db_lib::db_utils::query::DpQuery; +use db_lib::db_utils::translator::{PGDerivedTranslator, SilentTranslator, SingleColumnTranslator}; +use db_lib::db_utils::*; +use db_lib::init::DbConfig; +use dpsfw::api_method::{Processing, ProcessingReply, RawRequest, SelfDeclaration}; +use dpsfw::dp_utils::clienting::{perform_client_request, ServiceDestination}; +use dpsfw::http::{Method, StatusCode}; +use dpsfw::processing_meta_store::ProcessingMetaStore; +use dpsfw::self_declare; +use dpsfw_types::config::GlobalConfigured; +use dpsfw_types::db::io_types::*; +use dpsfw_types::dp::services::NodeService; +use dpsfw_types::error::PE; +use dpsfw_types::merge::MergeLeaf; +use dpsfw_types::{EmptyIOData, StdInt}; +use utils::asynctor; + +/**************************************************************************************************/ +self_declare!(GetAllMethod, comments:r#" +try to get all accessible mcusids +"#); +#[async_trait] +impl Processing for GetAllMethod { + type Input = EmptyIOData; + type Output = Vec<MCUSID>; + async fn execute_call( + &self, + _request: &RawRequest, + _input: 
&Self::Input, + output: &mut Self::Output, + errors: &mut Vec<PE>, + pms: &mut ProcessingMetaStore, + ) -> ProcessingReply { + let db = &DbConfig::get_global().pg_database_name; + let mut query = DpQuery::new_select(SeriesDataIO::TABLE, vec![SeriesDataIO::try_pk()?]); + let result: Vec<MCUSID> = perform_sql_query_once( + &mut query, + &SingleColumnTranslator {}, + errors, + db, + &get_username_from_id(&pms.local_access_group), + ) + .await?; + + check_vec_not_empty(&result)?; + *output = result; + Ok(StatusCode::OK) + } +} + +/**************************************************************************************************/ +self_declare!(GetTypesMethod, comments:r#" +get all defined measuring instrument types +"#); +#[async_trait] +impl Processing for GetTypesMethod { + type Input = EmptyIOData; + type Output = Vec<MIDeviceType>; + async fn execute_call( + &self, + _request: &RawRequest, + _input: &Self::Input, + output: &mut Self::Output, + errors: &mut Vec<PE>, + pms: &mut ProcessingMetaStore, + ) -> ProcessingReply { + let db = &DbConfig::get_global().pg_database_name; + let mut query = DpQuery::new_select(MIDeviceType::TABLE, MIDeviceType::get_cols()); + let result: Vec<MIDeviceType> = perform_sql_query_once( + &mut query, + &PGDerivedTranslator {}, + errors, + db, + &get_username_from_id(&pms.local_access_group), + ) + .await?; + + check_vec_not_empty(&result)?; + *output = result; + Ok(StatusCode::OK) + } +} + +/**************************************************************************************************/ +self_declare!(GetDataMethod, comments:r#" +returns the series data of the mcusids listed in the input object +"#); +#[async_trait] +impl Processing for GetDataMethod { + type Input = Vec<MCUSID>; + type Output = GetSeriesDataOutput; + #[allow(clippy::ptr_arg)] + async fn execute_call( + &self, + _request: &RawRequest, + input: &Self::Input, + output: &mut Self::Output, + errors: &mut Vec<PE>, + pms: &mut ProcessingMetaStore, + ) -> ProcessingReply { 
+ if input.is_empty() { + errors.push(PE::malformed_input("Input vector of MCUSIDs was empty.")); + return Ok(StatusCode::BAD_REQUEST); + } + + let db = &DbConfig::get_global().pg_database_name; + let username = get_username_from_id(&pms.local_access_group); + let mut client = connect_async_to_db(db, &username).await?; + + //get series data + let cond_params = vec![vec![Box::new(input) as PgParam]]; + let cols = get_applicable_columns4user(SeriesDataIO::TABLE, 'R', &mut client).await?; + let mut query = DpQuery::new_select(SeriesDataIO::TABLE, cols); + query.set_condition(cond_params, "mcusid = ANY(${})")?; + let vec_series_data: Vec<SeriesDataIO> = exec_and_translate_sql_query_once( + &mut client, + &mut query, + &PGDerivedTranslator {}, + errors, + ) + .await?; + if vec_series_data.is_empty() { + return Ok(StatusCode::OK); + }; + + //extract series file data + let f_cols = FileData::get_cols().join(",f."); + // TODO: replace tablenames + let query_cmd = format!("SELECT sf.uid AS uid, sf.file_id, f.{}, ft.file_type_desc FROM (SELECT uid, file_id FROM mc_mcusid_files WHERE uid = ANY($1)) sf INNER JOIN base_files f ON sf.file_id = f.file_id INNER JOIN base_file_types ft ON ft.file_type_id = f.file_type_id;",f_cols); + let query_cond = vec![vec![Box::new(input.clone()) as PgParam]]; + let mut query = DpQuery::new_raw(query_cond, &query_cmd)?; + let vec_series_files: Vec<FileDataIO<MCUSID>> = exec_and_translate_sql_query_once( + &mut client, + &mut query, + &PGDerivedTranslator {}, + errors, + ) + .await + .unwrap_or_else(|e| { + errors.push(e); + Vec::new() + }); + + // transform into hashmap + + let mut series_file_map: HashMap<MCUSID, Vec<MergeLeaf<FileDataIO<MCUSID>>>> = + HashMap::new(); + for s_file in vec_series_files { + if series_file_map.contains_key(&s_file.uid) { + series_file_map + .get_mut(&s_file.uid) + .unwrap() + .push(MergeLeaf::with_current_node(s_file)); + } else { + series_file_map.insert( + s_file.uid.clone(), + 
vec![MergeLeaf::with_current_node(s_file)], + ); + } + } + + // extract device_type ids + let mut device_data_map: HashMap<StdInt, MIDeviceType> = HashMap::new(); + let device_type_ids = vec_series_data + .iter() + .filter_map(|data| data.mid_type.get_first_val()) + .copied() + .collect::<HashSet<StdInt>>(); + let device_type_ids: Vec<StdInt> = device_type_ids.into_iter().collect(); + if !device_type_ids.is_empty() { + // get device type data + let cond_params = vec![vec![Box::new(device_type_ids) as PgParam]]; + let cols = get_applicable_columns4user(MIDeviceType::TABLE, 'R', &mut client).await?; + let mut query = DpQuery::new_select(MIDeviceType::TABLE, cols); + query.set_condition(cond_params, "mid_type_id = ANY(${})")?; + let vec_device_data: Vec<MIDeviceType> = exec_and_translate_sql_query_once( + &mut client, + &mut query, + &PGDerivedTranslator {}, + errors, + ) + .await?; + + // transform into hashmap for later use + device_data_map = vec_device_data + .into_iter() + .map(|data| (data.mid_type_id, data)) + .collect::<HashMap<_, _>>(); + } + + let mut result: Vec<DBSeriesDigRep> = Vec::new(); + for series in vec_series_data { + let mut result_series = DBSeriesDigRep { + device_type_data: series + .mid_type + .get_first_val() + .and_then(|mid_type| device_data_map.get(mid_type).cloned()) + .into(), + ..Default::default() + }; + // if let Some(ref mcusid) = series.mcusid { + result_series.files = match series_file_map.get(&series.mcusid) { + Some(s_f) => s_f.clone(), + None => vec![], + }; + // }; + result_series.data = series; + result.push(result_series); + } + + if result.is_empty() { + return Ok(StatusCode::OK); + }; + *output = result; + Ok(StatusCode::OK) + } +} + +/**************************************************************************************************/ +self_declare!(UpdateDataMethod, comments:r#" +update data for given mcusid +"#); +#[async_trait] +impl Processing for UpdateDataMethod { + type Input = SeriesDataIO; + type Output = 
EmptyIOData; + async fn execute_call( + &self, + _request: &RawRequest, + input: &Self::Input, + _output: &mut Self::Output, + errors: &mut Vec<PE>, + pms: &mut ProcessingMetaStore, + ) -> ProcessingReply { + let db = &DbConfig::get_global().pg_database_name; + let access_user = get_username_from_id(&pms.local_access_group); + let mut client = connect_async_to_db(db, &access_user).await?; + + let params = [input.clone()]; + // let cond_params = vec![vec![&input.mcusid as &(dyn ToSql + Sync + Send)]]; + let cols = get_applicable_columns4user(SeriesDataIO::TABLE, 'W', &mut client).await?; + let mut query = DpQuery::new_upsert( + ¶ms, + SeriesDataIO::TABLE, + None, + cols, + SeriesDataIO::try_pk()?, + )?; + // query.set_condition(cond_params.as_slice(), "mcusid = ${}")?; + exec_and_translate_sql_query_once(&mut client, &mut query, &SilentTranslator {}, errors) + .await?; + + Ok(StatusCode::OK) + } +} + +/**************************************************************************************************/ +self_declare!(InsertDataMethod, comments:r#" +insert data into series table +"#); +#[async_trait] +impl Processing for InsertDataMethod { + type Input = AddSeriesDataInput; + type Output = EmptyIOData; + async fn execute_call( + &self, + _request: &RawRequest, + input: &Self::Input, + _output: &mut Self::Output, + errors: &mut Vec<PE>, + pms: &mut ProcessingMetaStore, + ) -> ProcessingReply { + // TODO check if user can select from + let db = &DbConfig::get_global().pg_database_name; + let access_user = get_username_from_id(&pms.local_access_group); + let mut client = connect_async_to_db(db, &access_user).await?; + let params = input.clone(); + let cols = get_applicable_columns4user(SeriesDataIO::TABLE, 'W', &mut client).await?; + let mut query = DpQuery::new_insert(¶ms, SeriesDataIO::TABLE, None, cols)?; + exec_and_translate_sql_query(&mut client, &mut query, &SilentTranslator {}, errors).await?; + + // start notify all nodes of concerned orgs and sht + let 
notify_all_request = + RawRequest::new_from_struct(None::<StdInt>, Method::POST, "/index/notify_nodes/"); + let _ = asynctor!(cid, perform_client_request; ¬ify_all_request, &ServiceDestination::Internal(NodeService::ARM, None), &mut EmptyIOData{}, errors).map_err(|e| errors.push(e)); + + Ok(StatusCode::OK) + } +} +/**************************************************************************************************/ +self_declare!(DeleteDataMethod, comments:r#" +delete data from product_series table, will error if there are ForeignKeys to this +"#); +#[async_trait] +impl Processing for DeleteDataMethod { + type Input = MCUSID; + type Output = EmptyIOData; + async fn execute_call( + &self, + _request: &RawRequest, + input: &Self::Input, + _output: &mut Self::Output, + errors: &mut Vec<PE>, + pms: &mut ProcessingMetaStore, + ) -> ProcessingReply { + // get user name + let db = &DbConfig::get_global().pg_database_name; + let access_group = get_username_from_id(&pms.local_access_group); + let mut client = connect_async_to_db(db, &access_group).await?; + + // delete from product_series + let mut query = DpQuery::new_delete(SeriesDataIO::TABLE); + let cond_params = vec![vec![Box::new(input) as PgParam]]; + query.set_condition(cond_params, "mcusid = ${}")?; + exec_and_translate_sql_query_once(&mut client, &mut query, &SilentTranslator {}, errors) + .await?; + + // start notify all nodes of concerned orgs and sht + let notify_all_request = + RawRequest::new_from_struct(None::<StdInt>, Method::POST, "/index/notify_nodes/"); + let _ = asynctor!(cid, perform_client_request; ¬ify_all_request, &ServiceDestination::Internal(NodeService::ARM, None), &mut EmptyIOData{}, errors).map_err(|e| errors.push(e)); + + Ok(StatusCode::OK) + } +} diff --git a/src/db/mod.rs b/src/db/mod.rs new file mode 100644 index 0000000000000000000000000000000000000000..9e45b68e21a07233c50a962e555ef021a2de0caf --- /dev/null +++ b/src/db/mod.rs @@ -0,0 +1,10 @@ +#[cfg(feature = "call_id_tokio")] +pub mod 
init; +#[cfg(feature = "call_id_tokio")] +pub mod methods; +#[cfg(feature = "call_id_tokio")] +pub mod routing; + +pub mod types; + +pub static MC_SCHEMA: &str = "mc"; diff --git a/src/db/routing.rs b/src/db/routing.rs new file mode 100644 index 0000000000000000000000000000000000000000..fd0e247eb592ac81fa6c92fcf4a5661384b16515 --- /dev/null +++ b/src/db/routing.rs @@ -0,0 +1,112 @@ +/*!*************************************************************************************************** +* Set Input data structure here => actually routes method to the right function * +* Set filter functions here * +**************************************************************************************************!*/ +#![allow(unreachable_patterns)] +#![allow(deprecated)] + +#[cfg(any(feature = "dpif_intra", feature = "dpif_debug"))] +use super::methods::mid; +#[cfg(any(feature = "dpif_intra", feature = "dpif_debug"))] +use super::methods::series; +#[cfg(any(feature = "dpif_intra", feature = "dpif_debug"))] +use dpsfw::meta_processors as mp; + +use dpsfw::routing::RoutingTable; + +///************************************************************************* +///* The basic routing Table that should always be merged to Service Tables * +///*************************************************************************/ +pub fn create_routing_table() -> RoutingTable { + #[allow(unused_mut)] + let mut routing_table = RoutingTable::new(); + + #[cfg(any(feature = "dpif_intra", feature = "dpif_debug"))] + routing_table.add_get( + "/mc/mid/get_all/", + mid::GetAllMethod {}, + &[&mp::SimpleTokenAuth {}], + &[&mp::Ident {}], + ); + #[cfg(any(feature = "dpif_intra", feature = "dpif_debug"))] + routing_table.add_post( + "/mc/mid/get_all/", + mid::GetAllBySeriesMethod {}, + &[&mp::SimpleTokenAuth {}], + &[&mp::Ident {}], + ); + #[cfg(any(feature = "dpif_intra", feature = "dpif_debug"))] + routing_table.add_post( + "/mc/mid/get_data/", + mid::GetDataMethod {}, + &[&mp::SimpleTokenAuth {}], + &[&mp::Ident 
{}], + ); + #[cfg(any(feature = "dpif_intra", feature = "dpif_debug"))] + routing_table.add_post( + "/mc/mid/update_data/", + mid::UpdateDataMethod {}, + &[&mp::SimpleTokenAuth {}], + &[&mp::Ident {}], + ); + #[cfg(any(feature = "dpif_intra", feature = "dpif_debug"))] + routing_table.add_post( + "/mc/mid/insert_data/", + mid::InsertDataMethod {}, + &[&mp::SimpleTokenAuth {}], + &[&mp::Ident {}], + ); + #[cfg(any(feature = "dpif_intra", feature = "dpif_debug"))] + routing_table.add_post( + "/mc/mid/delete/", + mid::DeleteDataMethod {}, + &[&mp::SimpleTokenAuth {}], + &[&mp::Ident {}], + ); + #[cfg(any(feature = "dpif_intra", feature = "dpif_debug"))] + routing_table.add_get( + "/mc/series/get_types/", + series::GetTypesMethod {}, + &[&mp::SimpleTokenAuth {}], + &[&mp::Ident {}], + ); + #[cfg(any(feature = "dpif_intra", feature = "dpif_debug"))] + routing_table.add_get( + "/mc/series/get_all/", + series::GetAllMethod {}, + &[&mp::SimpleTokenAuth {}], + &[&mp::Ident {}], + ); + #[cfg(any(feature = "dpif_intra", feature = "dpif_debug"))] + routing_table.add_post( + "/mc/series/get_data/", + series::GetDataMethod {}, + &[&mp::SimpleTokenAuth {}], + &[&mp::Ident {}], + ); + #[cfg(any(feature = "dpif_intra", feature = "dpif_debug"))] + routing_table.add_post( + "/mc/series/update_data/", + series::UpdateDataMethod {}, + &[&mp::SimpleTokenAuth {}], + &[&mp::Ident {}], + ); + #[cfg(any(feature = "dpif_intra", feature = "dpif_debug"))] + routing_table.add_post( + "/mc/series/insert_data/", + series::InsertDataMethod {}, + &[&mp::SimpleTokenAuth {}], + &[&mp::Ident {}], + ); + #[cfg(any(feature = "dpif_intra", feature = "dpif_debug"))] + routing_table.add_post( + "/mc/series/delete/", + series::DeleteDataMethod {}, + &[&mp::SimpleTokenAuth {}], + &[&mp::Ident {}], + ); + + let mut default_router_table = dpsfw::routing::DEFAULT_ROUTER.clone(); + routing_table.extend(&mut default_router_table); + routing_table +} diff --git a/src/db/types/db_types.rs b/src/db/types/db_types.rs 
new file mode 100644 index 0000000000000000000000000000000000000000..9513588b47d92e2334b4b8e86f5885ff0afcf1f4 --- /dev/null +++ b/src/db/types/db_types.rs @@ -0,0 +1,90 @@ +use serde::{Deserialize, Serialize}; + +use crate::mc::uids::{MCUDID, MCUSID}; +#[cfg(feature = "default_verbose")] +use default_verbose::{CommentedVerboseDefault, DefaultVerbose}; +#[cfg(feature = "default_verbose")] +use dp_proc_macros::DefaultVerbose; +use dp_proc_macros::{dp_wrap, Merge}; +use dpsfw_types::db::db_types::DBDerived; +#[cfg(feature = "call_id_tokio")] +use dpsfw_types::db::db_types::PGDerived; +use dpsfw_types::dp::comm_link::{CommLink, LinkContentEncoding}; +use dpsfw_types::dp::uids::{Uri, ULID, UPID}; +use dpsfw_types::error::PE; +use dpsfw_types::merge::MergeLeaf; +use dpsfw_types::{DateTime, StdInt}; + +/// Measurement instrument representation (Has to include all columns of the mi table!) +#[cfg_attr( + feature = "call_id_tokio", + dp_wrap(mergeable, pg_derived, table = "mc_measuring_instruments") +)] +#[cfg_attr( + not(feature = "call_id_tokio"), + dp_wrap(mergeable, table = "mc_measuring_instruments") +)] +#[cfg_attr(feature = "default_verbose", derive(DefaultVerbose))] +#[derive(Serialize, Deserialize, Default, Clone, Debug, PartialEq)] +pub struct MiDataIO { + #[primary_key] + pub mcudid: MCUDID, + pub prod_year: StdInt, + pub last_update: DateTime, + pub is_verified: bool, + pub mi_link: CommLink, + pub mi_link_enc: LinkContentEncoding, + pub last_calibration: DateTime, + pub location: ULID, + pub verification_authority: String, + pub owner: String, + pub mcusid: MCUSID, + pub product_owner: UPID, + pub comments: String, + pub test_location: String, // Prüfort, Waagen-/Zapfsäulenspezifischer Wert + pub product: String, // zapfsäulenspezifischer Wert: Produktart, die gezapft wird + pub product_name: String, // zapfsäulenspezifischer Wert: Produkt-Name, der gezapft wird + pub taxi_plate_number: String, // spezifischer Wert für Taxameter und Wegstreckenzähler: 
Kennzeichen des Taxis + pub tariff_zone: String, // spezifischer Wert für Taxameter und Wegstreckenzähler + pub status_number: String, // spezifischer Wert für Taxameter und Wegstreckenzähler: Ordnungsnummer + pub propulsion_type: String, // spezifischer Wert für Taxameter und Wegstreckenzähler +} + +/// Series representation (Has to include all columns of the mi table!) +#[cfg_attr( + feature = "call_id_tokio", + dp_wrap(mergeable, pg_derived, table = "mc_product_series") +)] +#[cfg_attr( + not(feature = "call_id_tokio"), + dp_wrap(mergeable, table = "mc_product_series") +)] +#[cfg_attr(feature = "default_verbose", derive(DefaultVerbose))] +#[derive(Serialize, Deserialize, Default, Clone, Debug, PartialEq)] +pub struct SeriesDataIO { + #[primary_key] + pub mcusid: MCUSID, + pub name: String, + pub series_image_file_id: Uri, + pub manufacturer: String, + pub latest_verified_software_version: String, + pub latest_conformity_assessment: DateTime, + pub mid_type: StdInt, + pub quantity: String, // Waagen-/Zapfsäulenspezifische Wert + pub conformity_statement: String, // Waagenspezifischer Wert: VDMA compare link +} + +/// Output of the get_process method +#[cfg_attr( + feature = "call_id_tokio", + dp_wrap(pg_derived, table = "mc_mi_device_types") +)] +#[cfg_attr(not(feature = "call_id_tokio"), dp_wrap(table = "mc_mi_device_types"))] +#[cfg_attr(feature = "default_verbose", derive(DefaultVerbose))] +#[derive(Serialize, Deserialize, Default, Clone, Debug, PartialEq, PartialOrd)] +// #[to_pg_derived()] +pub struct MIDeviceType { + pub mid_type_id: StdInt, + pub m_unit: String, + pub m_description: String, +} diff --git a/src/db/types/io_types.rs b/src/db/types/io_types.rs new file mode 100644 index 0000000000000000000000000000000000000000..456f6deef6261b23ff4f614f744b2120d91fd0b1 --- /dev/null +++ b/src/db/types/io_types.rs @@ -0,0 +1,56 @@ +use serde::{Deserialize, Serialize}; + +pub use super::db_types::*; +use crate::db_io::{Location, Person}; +use 
crate::mc::uids::{MCUDID, MCUSID}; +#[cfg(feature = "default_verbose")] +use default_verbose::{CommentedVerboseDefault, DefaultVerbose}; +#[cfg(feature = "default_verbose")] +use dp_proc_macros::DefaultVerbose; +use dp_proc_macros::Merge; +use dpsfw_types::db::io_types::FileDataIO; +use dpsfw_types::error::PE; +use dpsfw_types::merge::MergeLeaf; + +/*************************************************************************************************** + MI related queries +***************************************************************************************************/ + +/// Output of the get_mi_data method (returns information of a (list of) mi(s)) +pub type GetMiDataOutput = Vec<DBDigRep>; + +/// Input of the add_mi_data method (inserts new MIs into db) +pub type AddMiDataInput = Vec<MiDataIO>; + +#[allow(clippy::upper_case_acronyms)] +/// output data for a single mi +#[cfg_attr(feature = "default_verbose", derive(DefaultVerbose))] +#[derive(Serialize, Deserialize, Default, Clone, Debug, Merge)] +pub struct DBDigRep { + pub data: MiDataIO, + pub series_data: SeriesDataIO, + pub device_type_data: MergeLeaf<MIDeviceType>, + pub persons_data: Vec<MergeLeaf<Person>>, + pub locations_data: Vec<MergeLeaf<Location>>, + pub series_files: Vec<MergeLeaf<FileDataIO<MCUSID>>>, + pub device_files: Vec<MergeLeaf<FileDataIO<MCUDID>>>, +} +/*************************************************************************************************** + Series related queries +***************************************************************************************************/ + +/// Output of the get_series_data method (returns information of a (list of) serie(s)) +pub type GetSeriesDataOutput = Vec<DBSeriesDigRep>; + +#[allow(clippy::upper_case_acronyms)] +/// Output of the get_series_data method (returns information of a (list of) serie(s)) +#[cfg_attr(feature = "default_verbose", derive(DefaultVerbose))] +#[derive(Serialize, Deserialize, Default, Clone, Debug, Merge)] +pub struct 
DBSeriesDigRep { + pub data: SeriesDataIO, + pub device_type_data: MergeLeaf<MIDeviceType>, + pub files: Vec<MergeLeaf<FileDataIO<MCUSID>>>, +} + +/// Input of the add_series_data method (inserts new series into db) +pub type AddSeriesDataInput = Vec<SeriesDataIO>; diff --git a/src/db/types/mod.rs b/src/db/types/mod.rs new file mode 100644 index 0000000000000000000000000000000000000000..5f7f2b04545a51107406d16216ef7e198d6adb26 --- /dev/null +++ b/src/db/types/mod.rs @@ -0,0 +1,2 @@ +pub mod db_types; +pub mod io_types; diff --git a/src/lib.rs b/src/lib.rs new file mode 100644 index 0000000000000000000000000000000000000000..1e3ae1dd8ab51bdb6bda6110d675135dcd8d9c8d --- /dev/null +++ b/src/lib.rs @@ -0,0 +1,25 @@ +/*! +//! The Metrology Cloud REST-API-Framework +//! provides common IO-Types +//! compiling without default features aka without `call_id_tokio` will only +//! compile types, which you can use for frontends esp. if they have to +//! compile for WebAssembly +!*/ + +// #![warn(missing_docs)] + +#[cfg(feature = "default_verbose")] +extern crate default_verbose; +extern crate phf; + +/// backend associated service stuff +pub mod backend; +/// db associated service stuff +pub mod db; +/// mc associated common service stuff +pub mod mc; + +pub const MC_SCHEMA_VERSION: &'static str = utils::crate_version!(); + +pub use dpsfw_types::backend::io_types as backend_io; +pub use dpsfw_types::db::io_types as db_io; diff --git a/src/mc/mod.rs b/src/mc/mod.rs new file mode 100644 index 0000000000000000000000000000000000000000..b6a64abfb8189bb7612e40a58321943be3d08b9b --- /dev/null +++ b/src/mc/mod.rs @@ -0,0 +1 @@ +pub mod uids; diff --git a/src/mc/uids/mcudid.rs b/src/mc/uids/mcudid.rs new file mode 100644 index 0000000000000000000000000000000000000000..25280852a542f61b025dd9d7814bed496506d705 --- /dev/null +++ b/src/mc/uids/mcudid.rs @@ -0,0 +1,173 @@ +use bytes::BytesMut; +use postgres_types::to_sql_checked; +use postgres_types::{FromSql as PGFromSql, IsNull, ToSql as 
PGToSql, Type as PGType}; +#[cfg(feature = "rusqlite_traits")] +use rusqlite::types::{FromSqlResult, ToSqlOutput, ValueRef}; +#[cfg(feature = "rusqlite_traits")] +use rusqlite::Result as SQLite_Result; +use serde::de::{Deserialize, Deserializer}; +use serde::ser::{Serialize, Serializer}; +use std::error::Error; +use std::fmt::{self, Display}; + +// use super::NodeID; +#[cfg(feature = "default_verbose")] +use default_verbose::{CommentedVerboseDefault, DefaultVerbose}; +#[cfg(feature = "default_verbose")] +use dp_proc_macros::DefaultVerbose; +use dpsfw_types::ctx; +use dpsfw_types::dp::uids::GeneralUID; +use dpsfw_types::error::PE; +use utils::log::{debug, warn}; + +#[cfg_attr(feature = "default_verbose", derive(DefaultVerbose))] +#[derive(Default, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct MCUDID { + pub test_data: bool, + pub node_id: String, + pub series_id: String, + pub serial: String, +} +impl TryFrom<&str> for MCUDID { + type Error = PE; + fn try_from(s: &str) -> Result<Self, PE> { + let mut parts: Vec<String> = s.split('.').map(|s| s.to_ascii_uppercase()).collect(); + let test_data = match parts.len() { + 3 => false, + 4 => true, + _ => return Err(PE::malformed_input("invalid MCUDID").warn(ctx!())), + }; + let serial = parts.pop().unwrap(); + if serial.is_empty() { + return Err(PE::malformed_input("invalid MCUDID (zero sized mid serial)").warn(ctx!())); + }; + let series_id = parts.pop().unwrap(); + if series_id.is_empty() { + return Err(PE::malformed_input("invalid MCUDID (zero sized series_id)").warn(ctx!())); + }; + let node_id = parts.pop().unwrap(); + if node_id.is_empty() { + return Err(PE::malformed_input("invalid MCUDID (zero sized node_id)").warn(ctx!())); + }; + Ok(MCUDID { + test_data, + serial, + series_id, + node_id, + }) + } +} +impl TryFrom<GeneralUID> for MCUDID { + type Error = PE; + fn try_from(input: GeneralUID) -> Result<Self, PE> { + if input.uid_type == super::MCUD_ID { + MCUDID::try_from(input.uid.as_str()) + } 
else { + Err(PE::malformed_input(format!( + "Wrong scheme {}, expected '{}'", + input.uid_type, + super::MCUD_ID + )) + .id_warn(ctx!())) + } + } +} +impl TryFrom<&GeneralUID> for MCUDID { + type Error = PE; + fn try_from(input: &GeneralUID) -> Result<Self, PE> { + if input.uid_type == super::MCUD_ID { + MCUDID::try_from(input.uid.as_str()) + } else { + Err(PE::malformed_input(format!( + "Wrong scheme {}, expected '{}'", + input.uid_type, + super::MCUD_ID + )) + .id_warn(ctx!())) + } + } +} +impl From<MCUDID> for GeneralUID { + fn from(input: MCUDID) -> Self { + GeneralUID { + uid: input.to_string(), + uid_type: String::from(super::MCUD_ID), + } + } +} +impl From<&MCUDID> for GeneralUID { + fn from(input: &MCUDID) -> Self { + GeneralUID { + uid: input.to_string(), + uid_type: String::from(super::MCUD_ID), + } + } +} +impl Display for MCUDID { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self.test_data { + true => write!(f, "t.{}.{}.{}", self.node_id, self.series_id, self.serial), + false => write!(f, "{}.{}.{}", self.node_id, self.series_id, self.serial), + } + } +} +#[cfg(feature = "rusqlite_traits")] +impl rusqlite::types::ToSql for MCUDID { + fn to_sql(&self) -> SQLite_Result<ToSqlOutput> { + Ok(ToSqlOutput::from(self.to_string())) + } +} +#[cfg(feature = "rusqlite_traits")] +impl rusqlite::types::FromSql for MCUDID { + fn column_result(value: ValueRef) -> FromSqlResult<Self> { + Ok(Self::try_from(value.as_str()?).unwrap()) + } +} +impl<'de> Deserialize<'de> for MCUDID { + fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> + where + D: Deserializer<'de>, + { + use serde::de::Error; + let x = String::deserialize(deserializer); + x.and_then(|string| MCUDID::try_from(string.as_str()).map_err(Error::custom)) + } +} +impl Serialize for MCUDID { + fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> + where + S: Serializer, + { + serializer.serialize_str(&format!("{}", self)) + } +} + +impl PGToSql for MCUDID { + fn to_sql( + 
&self, + ty: &PGType, + w: &mut BytesMut, + ) -> Result<IsNull, Box<dyn Error + Sync + Send>> { + <String as PGToSql>::to_sql(&self.to_string(), ty, w) + } + + fn accepts(ty: &PGType) -> bool { + <String as PGToSql>::accepts(ty) + } + + to_sql_checked!(); +} +impl PGFromSql<'_> for MCUDID { + fn from_sql(pg_type: &PGType, raw: &[u8]) -> Result<MCUDID, Box<dyn Error + Sync + Send>> { + let s = String::from_sql(pg_type, raw).map_err(|e| { + warn!("{}", e); + debug!("Got String: {:#?}", raw); + e + })?; + Self::try_from(s.as_str()).map_err(|e| Box::new(e) as Box<dyn Error + Send + Sync>) + } + + fn accepts(ty: &PGType) -> bool { + <String as PGFromSql>::accepts(ty) + } +} diff --git a/src/mc/uids/mcuid.rs b/src/mc/uids/mcuid.rs new file mode 100644 index 0000000000000000000000000000000000000000..0abf77ff1ebec6724b5328e4fedd6f27c5ce909d --- /dev/null +++ b/src/mc/uids/mcuid.rs @@ -0,0 +1,92 @@ +#[cfg(feature = "rusqlite_traits")] +use rusqlite::types::{FromSqlResult, ToSqlOutput, ValueRef}; +#[cfg(feature = "rusqlite_traits")] +use rusqlite::Result as SQLite_Result; +use serde::de::{Deserialize, Deserializer}; +use serde::ser::{Serialize, Serializer}; +use std::convert::TryFrom; +use std::fmt::{self, Display}; + +use super::{MCUDID, MCUSID}; +#[cfg(feature = "default_verbose")] +use default_verbose::{CommentedVerboseDefault, DefaultVerbose}; +use dpsfw_types::ctx; +use dpsfw_types::error::PE; + +#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] +/// types of metrology cloud unique IDs +#[deprecated = "Use GeneralUID"] +pub enum MCUID { + /// a measuring instrument device + Device(MCUDID), + /// a measuring instrument series + Series(MCUSID), +} +impl TryFrom<&str> for MCUID { + type Error = PE; + fn try_from(s: &str) -> Result<Self, PE> { + let parts_num = s.matches('.').count(); + match (parts_num, s.starts_with("t.")) { + (1, false) | (2, true) => MCUSID::try_from(s).map(MCUID::Series), + (2, false) | (3, true) => MCUDID::try_from(s).map(MCUID::Device), + 
_ => Err(PE::malformed_input("invalid MCUID").warn(ctx!())), + } + } +} +impl Display for MCUID { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + MCUID::Device(id) => id.fmt(f), + MCUID::Series(id) => id.fmt(f), + } + } +} +#[cfg(feature = "rusqlite_traits")] +impl rusqlite::types::ToSql for MCUID { + fn to_sql(&self) -> SQLite_Result<ToSqlOutput> { + match self { + MCUID::Device(id) => id.to_sql(), + MCUID::Series(id) => id.to_sql(), + } + } +} +#[cfg(feature = "rusqlite_traits")] +impl rusqlite::types::FromSql for MCUID { + fn column_result(value: ValueRef) -> FromSqlResult<Self> { + Ok(Self::try_from(value.as_str()?).unwrap()) + } +} +impl Default for MCUID { + fn default() -> Self { + MCUID::Device(MCUDID::default()) + } +} +#[cfg(feature = "default_verbose")] +impl DefaultVerbose for MCUID { + fn default_verbose() -> CommentedVerboseDefault<Self> { + let mut cdv = CommentedVerboseDefault::new(MCUID::Device(MCUDID::default_verbose().val)); + cdv.comments.push(( + "variants are either MCUSID (String.String) or MCUDID (String.String.String)", + "self".to_string(), + )); + cdv + } +} +impl<'de> Deserialize<'de> for MCUID { + fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> + where + D: Deserializer<'de>, + { + use serde::de::Error; + let x = String::deserialize(deserializer); + x.and_then(|string| MCUID::try_from(string.as_ref()).map_err(Error::custom)) + } +} +impl Serialize for MCUID { + fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> + where + S: Serializer, + { + serializer.serialize_str(&format!("{}", self)) + } +} diff --git a/src/mc/uids/mcusid.rs b/src/mc/uids/mcusid.rs new file mode 100644 index 0000000000000000000000000000000000000000..6d554af8a076c86a9dace9a9eeccccbdbf9e7037 --- /dev/null +++ b/src/mc/uids/mcusid.rs @@ -0,0 +1,177 @@ +use bytes::BytesMut; +use postgres_types::to_sql_checked; +use postgres_types::{FromSql as PGFromSql, IsNull, ToSql as PGToSql, Type as PGType}; +#[cfg(feature = 
"rusqlite_traits")] +use rusqlite::types::{FromSqlResult, ToSqlOutput, ValueRef}; +#[cfg(feature = "rusqlite_traits")] +use rusqlite::Result as SQLite_Result; +use serde::de::{Deserialize, Deserializer}; +use serde::ser::{Serialize, Serializer}; +use std::error::Error; +use std::fmt::{self, Display}; + +// use super::NodeID; +use super::MCUDID; +#[cfg(feature = "default_verbose")] +use default_verbose::{CommentedVerboseDefault, DefaultVerbose}; +#[cfg(feature = "default_verbose")] +use dp_proc_macros::DefaultVerbose; +use dpsfw_types::ctx; +use dpsfw_types::dp::uids::GeneralUID; +use dpsfw_types::error::PE; +use utils::log::{debug, warn}; + +#[cfg_attr(feature = "default_verbose", derive(DefaultVerbose))] +#[derive(Default, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct MCUSID { + pub test_data: bool, + pub node_id: String, + pub series_id: String, +} +impl TryFrom<&str> for MCUSID { + type Error = PE; + fn try_from(s: &str) -> Result<Self, PE> { + let mut parts: Vec<String> = s.split('.').map(|s| s.to_ascii_uppercase()).collect(); + let test_data = match parts.len() { + 2 => false, + 3 => true, + _ => return Err(PE::malformed_input("invalid MCUSID").warn(ctx!())), + }; + let series_id = parts.pop().unwrap(); + if series_id.is_empty() { + return Err(PE::malformed_input("invalid MCUSID (zero sized series_id)").warn(ctx!())); + }; + let node_id = parts.pop().unwrap(); + if node_id.is_empty() { + return Err(PE::malformed_input("invalid MCUSID (zero sized node_id)").warn(ctx!())); + }; + Ok(MCUSID { + test_data, + series_id, + node_id, + }) + } +} +impl TryFrom<GeneralUID> for MCUSID { + type Error = PE; + fn try_from(input: GeneralUID) -> Result<Self, PE> { + if input.uid_type == super::MCUS_ID { + MCUSID::try_from(input.uid.as_str()) + } else { + Err(PE::malformed_input(format!( + "Wrong scheme {}, expected '{}'", + input.uid_type, + super::MCUS_ID + )) + .id_warn(ctx!())) + } + } +} +impl TryFrom<&GeneralUID> for MCUSID { + type Error = PE; + fn 
try_from(input: &GeneralUID) -> Result<Self, PE> { + if input.uid_type == super::MCUS_ID { + MCUSID::try_from(input.uid.as_str()) + } else { + Err(PE::malformed_input(format!( + "Wrong scheme {}, expected '{}'", + input.uid_type, + super::MCUS_ID + )) + .id_warn(ctx!())) + } + } +} +impl From<MCUSID> for GeneralUID { + fn from(input: MCUSID) -> Self { + GeneralUID { + uid: input.to_string(), + uid_type: String::from(super::MCUS_ID), + } + } +} +impl From<&MCUSID> for GeneralUID { + fn from(input: &MCUSID) -> Self { + GeneralUID { + uid: input.to_string(), + uid_type: String::from(super::MCUS_ID), + } + } +} +impl Display for MCUSID { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self.test_data { + true => write!(f, "t.{}.{}", self.node_id, self.series_id), + false => write!(f, "{}.{}", self.node_id, self.series_id), + } + } +} +#[cfg(feature = "rusqlite_traits")] +impl rusqlite::types::ToSql for MCUSID { + fn to_sql(&self) -> SQLite_Result<ToSqlOutput> { + Ok(ToSqlOutput::from(self.to_string())) + } +} +#[cfg(feature = "rusqlite_traits")] +impl rusqlite::types::FromSql for MCUSID { + fn column_result(value: ValueRef) -> FromSqlResult<Self> { + Self::try_from(value.as_str()?) + .map_err(|e| rusqlite::types::FromSqlError::Other(Box::new(e))) + } +} +impl From<MCUDID> for MCUSID { + fn from(dev: MCUDID) -> MCUSID { + MCUSID { + test_data: dev.test_data, + node_id: dev.node_id, + series_id: dev.series_id, + } + } +} +impl<'de> Deserialize<'de> for MCUSID { + fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> + where + D: Deserializer<'de>, + { + use serde::de::Error; + let x = String::deserialize(deserializer); + x.and_then(|string| MCUSID::try_from(string.as_str()).map_err(Error::custom)) + } +} +impl Serialize for MCUSID { + fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> + where + S: Serializer, + { + serializer.serialize_str(&format!("{}", self)) + } +} + +impl PGToSql for MCUSID { + fn to_sql( + &self, + ty: &PGType, + w: &mut BytesMut, + ) -> Result<IsNull, Box<dyn 
Error + Sync + Send>> { + <String as PGToSql>::to_sql(&self.to_string(), ty, w) + } + + fn accepts(ty: &PGType) -> bool { + <String as PGToSql>::accepts(ty) + } + + to_sql_checked!(); +} +impl PGFromSql<'_> for MCUSID { + fn from_sql(pg_type: &PGType, raw: &[u8]) -> Result<MCUSID, Box<dyn Error + Sync + Send>> { + let s = String::from_sql(pg_type, raw).map_err(|e| { + warn!("{}", e); + debug!("Got String: {:#?}", raw); + e + })?; + Self::try_from(s.as_str()).map_err(|e| Box::new(e) as Box<dyn Error + Send + Sync>) + } + + fn accepts(ty: &PGType) -> bool { + <String as PGFromSql>::accepts(ty) + } +} diff --git a/src/mc/uids/mod.rs b/src/mc/uids/mod.rs new file mode 100644 index 0000000000000000000000000000000000000000..7f6431e91e6de90c6b5ca741c053739b8acc4965 --- /dev/null +++ b/src/mc/uids/mod.rs @@ -0,0 +1,12 @@ +// // mcuid enum +// pub mod mcuid; +// pub use mcuid::MCUID; +// mc unique series IDs +pub mod mcusid; +pub use mcusid::MCUSID; +// mc unique device IDs +pub mod mcudid; +pub use mcudid::MCUDID; + +pub static MCUD_ID: &str = "mcudid"; +pub static MCUS_ID: &str = "mcusid";