diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml
index 10ee70997b..7a82632fb2 100644
--- a/.github/ISSUE_TEMPLATE/bug_report.yml
+++ b/.github/ISSUE_TEMPLATE/bug_report.yml
@@ -18,6 +18,18 @@ body:
validations:
required: true
+ - type: dropdown
+ id: version
+ attributes:
+ label: Major Version
+ description: Select the major version of Flipt that this bug is relevant to.
+ multiple: false
+ options:
+ - v1
+ - v2
+ validations:
+ required: true
+
- type: textarea
id: version-info
attributes:
@@ -61,13 +73,5 @@ body:
- OS
- Config file used
- - Database used (SQLite, MySQL, Postgres, etc.)
- - Screenshots
- - Exported data from the database (see below)
-
- - type: markdown
- attributes:
- value: |
- **Attach an Export**
-
- If the bug could be best shown with a specific set of data, please export your data by running `flipt export > export.yml` and then attach the YAML file to this issue. โค๏ธ
+ - Storage backend used (Filesystem, Database, Object, Git, OCI, etc.)
+ - Screenshots
\ No newline at end of file
diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml
index 32069935e9..80e17a2bba 100644
--- a/.github/ISSUE_TEMPLATE/feature_request.yml
+++ b/.github/ISSUE_TEMPLATE/feature_request.yml
@@ -1,6 +1,6 @@
name: Feature request
-description: Suggest an idea for this project
-labels: ["enhancement"]
+description: Suggest an idea for this project (v2)
+labels: ["enhancement", "v2"]
body:
- type: markdown
@@ -8,6 +8,8 @@ body:
value: |
Thanks for taking the time to fill out this feature request!
+ Please note that new features will only be added to the v2 version of Flipt.
+
- type: textarea
id: problem
attributes:
@@ -26,6 +28,12 @@ body:
validations:
required: true
+  - type: checkboxes
+    id: v2-only
+    attributes:
+      options:
+        - label: I understand that this feature will only be added to the v2 version of Flipt
+          required: true
- type: checkboxes
id: search
attributes:
@@ -38,19 +46,4 @@ body:
id: additional-context
attributes:
label: Additional Context
- description: Add any other context about the feature request here.
- placeholder: |
- Examples:
-
- - OS
- - Config file used
- - Database used (SQLite, MySQL, Postgres, etc.)
- - Screenshots
- - Exported data from the database (see below)
-
- - type: markdown
- attributes:
- value: |
- **Attach an Export**
-
- If your feature request could be best shown with a specific set of data, please export your data by running `flipt export > export.yml` and then attach the YAML file to this issue. โค๏ธ
+ description: Add any other context about the feature request here.
\ No newline at end of file
diff --git a/.github/workflows/benchmark.yml b/.github/workflows/benchmark.yml
deleted file mode 100644
index 6422c592a4..0000000000
--- a/.github/workflows/benchmark.yml
+++ /dev/null
@@ -1,26 +0,0 @@
-name: Benchmarks
-on:
- push:
- branches:
- - main
- pull_request:
- workflow_dispatch:
-
-env:
- GO_VERSION: "1.23"
-
-jobs:
- benchmark:
- name: Benchmark SQLite
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v4
-
- - uses: actions/setup-go@v5
- with:
- go-version: ${{ env.GO_VERSION }}
- check-latest: true
- cache: true
-
- - name: Run Benchmarks
- run: go test -run XXX -bench . -benchtime 5s -benchmem -short ./...
diff --git a/.github/workflows/integration-test.yml b/.github/workflows/integration-test.yml
index 399e0ddaad..62936a2279 100644
--- a/.github/workflows/integration-test.yml
+++ b/.github/workflows/integration-test.yml
@@ -39,11 +39,6 @@ jobs:
matrix:
test:
[
- "api/sqlite",
- "api/libsql",
- "api/postgres",
- "api/mysql",
- "api/cockroach",
"api/cache",
"api/cachetls",
"api/snapshot",
@@ -51,7 +46,6 @@ jobs:
"fs/git",
"fs/local",
"fs/s3",
- "fs/oci",
"fs/azblob",
"fs/gcs",
"import/export",
diff --git a/LICENSE b/LICENSE
index f288702d2f..31cd8582b4 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,674 +1,125 @@
- GNU GENERAL PUBLIC LICENSE
- Version 3, 29 June 2007
-
- Copyright (C) 2007 Free Software Foundation, Inc.
- Everyone is permitted to copy and distribute verbatim copies
- of this license document, but changing it is not allowed.
-
- Preamble
-
- The GNU General Public License is a free, copyleft license for
-software and other kinds of works.
-
- The licenses for most software and other practical works are designed
-to take away your freedom to share and change the works. By contrast,
-the GNU General Public License is intended to guarantee your freedom to
-share and change all versions of a program--to make sure it remains free
-software for all its users. We, the Free Software Foundation, use the
-GNU General Public License for most of our software; it applies also to
-any other work released this way by its authors. You can apply it to
-your programs, too.
-
- When we speak of free software, we are referring to freedom, not
-price. Our General Public Licenses are designed to make sure that you
-have the freedom to distribute copies of free software (and charge for
-them if you wish), that you receive source code or can get it if you
-want it, that you can change the software or use pieces of it in new
-free programs, and that you know you can do these things.
-
- To protect your rights, we need to prevent others from denying you
-these rights or asking you to surrender the rights. Therefore, you have
-certain responsibilities if you distribute copies of the software, or if
-you modify it: responsibilities to respect the freedom of others.
-
- For example, if you distribute copies of such a program, whether
-gratis or for a fee, you must pass on to the recipients the same
-freedoms that you received. You must make sure that they, too, receive
-or can get the source code. And you must show them these terms so they
-know their rights.
-
- Developers that use the GNU GPL protect your rights with two steps:
-(1) assert copyright on the software, and (2) offer you this License
-giving you legal permission to copy, distribute and/or modify it.
-
- For the developers' and authors' protection, the GPL clearly explains
-that there is no warranty for this free software. For both users' and
-authors' sake, the GPL requires that modified versions be marked as
-changed, so that their problems will not be attributed erroneously to
-authors of previous versions.
-
- Some devices are designed to deny users access to install or run
-modified versions of the software inside them, although the manufacturer
-can do so. This is fundamentally incompatible with the aim of
-protecting users' freedom to change the software. The systematic
-pattern of such abuse occurs in the area of products for individuals to
-use, which is precisely where it is most unacceptable. Therefore, we
-have designed this version of the GPL to prohibit the practice for those
-products. If such problems arise substantially in other domains, we
-stand ready to extend this provision to those domains in future versions
-of the GPL, as needed to protect the freedom of users.
-
- Finally, every program is threatened constantly by software patents.
-States should not allow patents to restrict development and use of
-software on general-purpose computers, but in those that do, we wish to
-avoid the special danger that patents applied to a free program could
-make it effectively proprietary. To prevent this, the GPL assures that
-patents cannot be used to render the program non-free.
-
- The precise terms and conditions for copying, distribution and
-modification follow.
-
- TERMS AND CONDITIONS
-
- 0. Definitions.
-
- "This License" refers to version 3 of the GNU General Public License.
-
- "Copyright" also means copyright-like laws that apply to other kinds of
-works, such as semiconductor masks.
-
- "The Program" refers to any copyrightable work licensed under this
-License. Each licensee is addressed as "you". "Licensees" and
-"recipients" may be individuals or organizations.
-
- To "modify" a work means to copy from or adapt all or part of the work
-in a fashion requiring copyright permission, other than the making of an
-exact copy. The resulting work is called a "modified version" of the
-earlier work or a work "based on" the earlier work.
-
- A "covered work" means either the unmodified Program or a work based
-on the Program.
-
- To "propagate" a work means to do anything with it that, without
-permission, would make you directly or secondarily liable for
-infringement under applicable copyright law, except executing it on a
-computer or modifying a private copy. Propagation includes copying,
-distribution (with or without modification), making available to the
-public, and in some countries other activities as well.
-
- To "convey" a work means any kind of propagation that enables other
-parties to make or receive copies. Mere interaction with a user through
-a computer network, with no transfer of a copy, is not conveying.
-
- An interactive user interface displays "Appropriate Legal Notices"
-to the extent that it includes a convenient and prominently visible
-feature that (1) displays an appropriate copyright notice, and (2)
-tells the user that there is no warranty for the work (except to the
-extent that warranties are provided), that licensees may convey the
-work under this License, and how to view a copy of this License. If
-the interface presents a list of user commands or options, such as a
-menu, a prominent item in the list meets this criterion.
-
- 1. Source Code.
-
- The "source code" for a work means the preferred form of the work
-for making modifications to it. "Object code" means any non-source
-form of a work.
-
- A "Standard Interface" means an interface that either is an official
-standard defined by a recognized standards body, or, in the case of
-interfaces specified for a particular programming language, one that
-is widely used among developers working in that language.
-
- The "System Libraries" of an executable work include anything, other
-than the work as a whole, that (a) is included in the normal form of
-packaging a Major Component, but which is not part of that Major
-Component, and (b) serves only to enable use of the work with that
-Major Component, or to implement a Standard Interface for which an
-implementation is available to the public in source code form. A
-"Major Component", in this context, means a major essential component
-(kernel, window system, and so on) of the specific operating system
-(if any) on which the executable work runs, or a compiler used to
-produce the work, or an object code interpreter used to run it.
-
- The "Corresponding Source" for a work in object code form means all
-the source code needed to generate, install, and (for an executable
-work) run the object code and to modify the work, including scripts to
-control those activities. However, it does not include the work's
-System Libraries, or general-purpose tools or generally available free
-programs which are used unmodified in performing those activities but
-which are not part of the work. For example, Corresponding Source
-includes interface definition files associated with source files for
-the work, and the source code for shared libraries and dynamically
-linked subprograms that the work is specifically designed to require,
-such as by intimate data communication or control flow between those
-subprograms and other parts of the work.
-
- The Corresponding Source need not include anything that users
-can regenerate automatically from other parts of the Corresponding
-Source.
-
- The Corresponding Source for a work in source code form is that
-same work.
-
- 2. Basic Permissions.
-
- All rights granted under this License are granted for the term of
-copyright on the Program, and are irrevocable provided the stated
-conditions are met. This License explicitly affirms your unlimited
-permission to run the unmodified Program. The output from running a
-covered work is covered by this License only if the output, given its
-content, constitutes a covered work. This License acknowledges your
-rights of fair use or other equivalent, as provided by copyright law.
-
- You may make, run and propagate covered works that you do not
-convey, without conditions so long as your license otherwise remains
-in force. You may convey covered works to others for the sole purpose
-of having them make modifications exclusively for you, or provide you
-with facilities for running those works, provided that you comply with
-the terms of this License in conveying all material for which you do
-not control copyright. Those thus making or running the covered works
-for you must do so exclusively on your behalf, under your direction
-and control, on terms that prohibit them from making any copies of
-your copyrighted material outside their relationship with you.
-
- Conveying under any other circumstances is permitted solely under
-the conditions stated below. Sublicensing is not allowed; section 10
-makes it unnecessary.
-
- 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
-
- No covered work shall be deemed part of an effective technological
-measure under any applicable law fulfilling obligations under article
-11 of the WIPO copyright treaty adopted on 20 December 1996, or
-similar laws prohibiting or restricting circumvention of such
-measures.
-
- When you convey a covered work, you waive any legal power to forbid
-circumvention of technological measures to the extent such circumvention
-is effected by exercising rights under this License with respect to
-the covered work, and you disclaim any intention to limit operation or
-modification of the work as a means of enforcing, against the work's
-users, your or third parties' legal rights to forbid circumvention of
-technological measures.
-
- 4. Conveying Verbatim Copies.
-
- You may convey verbatim copies of the Program's source code as you
-receive it, in any medium, provided that you conspicuously and
-appropriately publish on each copy an appropriate copyright notice;
-keep intact all notices stating that this License and any
-non-permissive terms added in accord with section 7 apply to the code;
-keep intact all notices of the absence of any warranty; and give all
-recipients a copy of this License along with the Program.
-
- You may charge any price or no price for each copy that you convey,
-and you may offer support or warranty protection for a fee.
-
- 5. Conveying Modified Source Versions.
-
- You may convey a work based on the Program, or the modifications to
-produce it from the Program, in the form of source code under the
-terms of section 4, provided that you also meet all of these conditions:
-
- a) The work must carry prominent notices stating that you modified
- it, and giving a relevant date.
-
- b) The work must carry prominent notices stating that it is
- released under this License and any conditions added under section
- 7. This requirement modifies the requirement in section 4 to
- "keep intact all notices".
-
- c) You must license the entire work, as a whole, under this
- License to anyone who comes into possession of a copy. This
- License will therefore apply, along with any applicable section 7
- additional terms, to the whole of the work, and all its parts,
- regardless of how they are packaged. This License gives no
- permission to license the work in any other way, but it does not
- invalidate such permission if you have separately received it.
-
- d) If the work has interactive user interfaces, each must display
- Appropriate Legal Notices; however, if the Program has interactive
- interfaces that do not display Appropriate Legal Notices, your
- work need not make them do so.
-
- A compilation of a covered work with other separate and independent
-works, which are not by their nature extensions of the covered work,
-and which are not combined with it such as to form a larger program,
-in or on a volume of a storage or distribution medium, is called an
-"aggregate" if the compilation and its resulting copyright are not
-used to limit the access or legal rights of the compilation's users
-beyond what the individual works permit. Inclusion of a covered work
-in an aggregate does not cause this License to apply to the other
-parts of the aggregate.
-
- 6. Conveying Non-Source Forms.
-
- You may convey a covered work in object code form under the terms
-of sections 4 and 5, provided that you also convey the
-machine-readable Corresponding Source under the terms of this License,
-in one of these ways:
-
- a) Convey the object code in, or embodied in, a physical product
- (including a physical distribution medium), accompanied by the
- Corresponding Source fixed on a durable physical medium
- customarily used for software interchange.
-
- b) Convey the object code in, or embodied in, a physical product
- (including a physical distribution medium), accompanied by a
- written offer, valid for at least three years and valid for as
- long as you offer spare parts or customer support for that product
- model, to give anyone who possesses the object code either (1) a
- copy of the Corresponding Source for all the software in the
- product that is covered by this License, on a durable physical
- medium customarily used for software interchange, for a price no
- more than your reasonable cost of physically performing this
- conveying of source, or (2) access to copy the
- Corresponding Source from a network server at no charge.
-
- c) Convey individual copies of the object code with a copy of the
- written offer to provide the Corresponding Source. This
- alternative is allowed only occasionally and noncommercially, and
- only if you received the object code with such an offer, in accord
- with subsection 6b.
-
- d) Convey the object code by offering access from a designated
- place (gratis or for a charge), and offer equivalent access to the
- Corresponding Source in the same way through the same place at no
- further charge. You need not require recipients to copy the
- Corresponding Source along with the object code. If the place to
- copy the object code is a network server, the Corresponding Source
- may be on a different server (operated by you or a third party)
- that supports equivalent copying facilities, provided you maintain
- clear directions next to the object code saying where to find the
- Corresponding Source. Regardless of what server hosts the
- Corresponding Source, you remain obligated to ensure that it is
- available for as long as needed to satisfy these requirements.
-
- e) Convey the object code using peer-to-peer transmission, provided
- you inform other peers where the object code and Corresponding
- Source of the work are being offered to the general public at no
- charge under subsection 6d.
-
- A separable portion of the object code, whose source code is excluded
-from the Corresponding Source as a System Library, need not be
-included in conveying the object code work.
-
- A "User Product" is either (1) a "consumer product", which means any
-tangible personal property which is normally used for personal, family,
-or household purposes, or (2) anything designed or sold for incorporation
-into a dwelling. In determining whether a product is a consumer product,
-doubtful cases shall be resolved in favor of coverage. For a particular
-product received by a particular user, "normally used" refers to a
-typical or common use of that class of product, regardless of the status
-of the particular user or of the way in which the particular user
-actually uses, or expects or is expected to use, the product. A product
-is a consumer product regardless of whether the product has substantial
-commercial, industrial or non-consumer uses, unless such uses represent
-the only significant mode of use of the product.
-
- "Installation Information" for a User Product means any methods,
-procedures, authorization keys, or other information required to install
-and execute modified versions of a covered work in that User Product from
-a modified version of its Corresponding Source. The information must
-suffice to ensure that the continued functioning of the modified object
-code is in no case prevented or interfered with solely because
-modification has been made.
-
- If you convey an object code work under this section in, or with, or
-specifically for use in, a User Product, and the conveying occurs as
-part of a transaction in which the right of possession and use of the
-User Product is transferred to the recipient in perpetuity or for a
-fixed term (regardless of how the transaction is characterized), the
-Corresponding Source conveyed under this section must be accompanied
-by the Installation Information. But this requirement does not apply
-if neither you nor any third party retains the ability to install
-modified object code on the User Product (for example, the work has
-been installed in ROM).
-
- The requirement to provide Installation Information does not include a
-requirement to continue to provide support service, warranty, or updates
-for a work that has been modified or installed by the recipient, or for
-the User Product in which it has been modified or installed. Access to a
-network may be denied when the modification itself materially and
-adversely affects the operation of the network or violates the rules and
-protocols for communication across the network.
-
- Corresponding Source conveyed, and Installation Information provided,
-in accord with this section must be in a format that is publicly
-documented (and with an implementation available to the public in
-source code form), and must require no special password or key for
-unpacking, reading or copying.
-
- 7. Additional Terms.
-
- "Additional permissions" are terms that supplement the terms of this
-License by making exceptions from one or more of its conditions.
-Additional permissions that are applicable to the entire Program shall
-be treated as though they were included in this License, to the extent
-that they are valid under applicable law. If additional permissions
-apply only to part of the Program, that part may be used separately
-under those permissions, but the entire Program remains governed by
-this License without regard to the additional permissions.
-
- When you convey a copy of a covered work, you may at your option
-remove any additional permissions from that copy, or from any part of
-it. (Additional permissions may be written to require their own
-removal in certain cases when you modify the work.) You may place
-additional permissions on material, added by you to a covered work,
-for which you have or can give appropriate copyright permission.
-
- Notwithstanding any other provision of this License, for material you
-add to a covered work, you may (if authorized by the copyright holders of
-that material) supplement the terms of this License with terms:
-
- a) Disclaiming warranty or limiting liability differently from the
- terms of sections 15 and 16 of this License; or
-
- b) Requiring preservation of specified reasonable legal notices or
- author attributions in that material or in the Appropriate Legal
- Notices displayed by works containing it; or
-
- c) Prohibiting misrepresentation of the origin of that material, or
- requiring that modified versions of such material be marked in
- reasonable ways as different from the original version; or
-
- d) Limiting the use for publicity purposes of names of licensors or
- authors of the material; or
-
- e) Declining to grant rights under trademark law for use of some
- trade names, trademarks, or service marks; or
-
- f) Requiring indemnification of licensors and authors of that
- material by anyone who conveys the material (or modified versions of
- it) with contractual assumptions of liability to the recipient, for
- any liability that these contractual assumptions directly impose on
- those licensors and authors.
-
- All other non-permissive additional terms are considered "further
-restrictions" within the meaning of section 10. If the Program as you
-received it, or any part of it, contains a notice stating that it is
-governed by this License along with a term that is a further
-restriction, you may remove that term. If a license document contains
-a further restriction but permits relicensing or conveying under this
-License, you may add to a covered work material governed by the terms
-of that license document, provided that the further restriction does
-not survive such relicensing or conveying.
-
- If you add terms to a covered work in accord with this section, you
-must place, in the relevant source files, a statement of the
-additional terms that apply to those files, or a notice indicating
-where to find the applicable terms.
-
- Additional terms, permissive or non-permissive, may be stated in the
-form of a separately written license, or stated as exceptions;
-the above requirements apply either way.
-
- 8. Termination.
-
- You may not propagate or modify a covered work except as expressly
-provided under this License. Any attempt otherwise to propagate or
-modify it is void, and will automatically terminate your rights under
-this License (including any patent licenses granted under the third
-paragraph of section 11).
-
- However, if you cease all violation of this License, then your
-license from a particular copyright holder is reinstated (a)
-provisionally, unless and until the copyright holder explicitly and
-finally terminates your license, and (b) permanently, if the copyright
-holder fails to notify you of the violation by some reasonable means
-prior to 60 days after the cessation.
-
- Moreover, your license from a particular copyright holder is
-reinstated permanently if the copyright holder notifies you of the
-violation by some reasonable means, this is the first time you have
-received notice of violation of this License (for any work) from that
-copyright holder, and you cure the violation prior to 30 days after
-your receipt of the notice.
-
- Termination of your rights under this section does not terminate the
-licenses of parties who have received copies or rights from you under
-this License. If your rights have been terminated and not permanently
-reinstated, you do not qualify to receive new licenses for the same
-material under section 10.
-
- 9. Acceptance Not Required for Having Copies.
-
- You are not required to accept this License in order to receive or
-run a copy of the Program. Ancillary propagation of a covered work
-occurring solely as a consequence of using peer-to-peer transmission
-to receive a copy likewise does not require acceptance. However,
-nothing other than this License grants you permission to propagate or
-modify any covered work. These actions infringe copyright if you do
-not accept this License. Therefore, by modifying or propagating a
-covered work, you indicate your acceptance of this License to do so.
-
- 10. Automatic Licensing of Downstream Recipients.
-
- Each time you convey a covered work, the recipient automatically
-receives a license from the original licensors, to run, modify and
-propagate that work, subject to this License. You are not responsible
-for enforcing compliance by third parties with this License.
-
- An "entity transaction" is a transaction transferring control of an
-organization, or substantially all assets of one, or subdividing an
-organization, or merging organizations. If propagation of a covered
-work results from an entity transaction, each party to that
-transaction who receives a copy of the work also receives whatever
-licenses to the work the party's predecessor in interest had or could
-give under the previous paragraph, plus a right to possession of the
-Corresponding Source of the work from the predecessor in interest, if
-the predecessor has it or can get it with reasonable efforts.
-
- You may not impose any further restrictions on the exercise of the
-rights granted or affirmed under this License. For example, you may
-not impose a license fee, royalty, or other charge for exercise of
-rights granted under this License, and you may not initiate litigation
-(including a cross-claim or counterclaim in a lawsuit) alleging that
-any patent claim is infringed by making, using, selling, offering for
-sale, or importing the Program or any portion of it.
-
- 11. Patents.
-
- A "contributor" is a copyright holder who authorizes use under this
-License of the Program or a work on which the Program is based. The
-work thus licensed is called the contributor's "contributor version".
-
- A contributor's "essential patent claims" are all patent claims
-owned or controlled by the contributor, whether already acquired or
-hereafter acquired, that would be infringed by some manner, permitted
-by this License, of making, using, or selling its contributor version,
-but do not include claims that would be infringed only as a
-consequence of further modification of the contributor version. For
-purposes of this definition, "control" includes the right to grant
-patent sublicenses in a manner consistent with the requirements of
-this License.
-
- Each contributor grants you a non-exclusive, worldwide, royalty-free
-patent license under the contributor's essential patent claims, to
-make, use, sell, offer for sale, import and otherwise run, modify and
-propagate the contents of its contributor version.
-
- In the following three paragraphs, a "patent license" is any express
-agreement or commitment, however denominated, not to enforce a patent
-(such as an express permission to practice a patent or covenant not to
-sue for patent infringement). To "grant" such a patent license to a
-party means to make such an agreement or commitment not to enforce a
-patent against the party.
-
- If you convey a covered work, knowingly relying on a patent license,
-and the Corresponding Source of the work is not available for anyone
-to copy, free of charge and under the terms of this License, through a
-publicly available network server or other readily accessible means,
-then you must either (1) cause the Corresponding Source to be so
-available, or (2) arrange to deprive yourself of the benefit of the
-patent license for this particular work, or (3) arrange, in a manner
-consistent with the requirements of this License, to extend the patent
-license to downstream recipients. "Knowingly relying" means you have
-actual knowledge that, but for the patent license, your conveying the
-covered work in a country, or your recipient's use of the covered work
-in a country, would infringe one or more identifiable patents in that
-country that you have reason to believe are valid.
-
- If, pursuant to or in connection with a single transaction or
-arrangement, you convey, or propagate by procuring conveyance of, a
-covered work, and grant a patent license to some of the parties
-receiving the covered work authorizing them to use, propagate, modify
-or convey a specific copy of the covered work, then the patent license
-you grant is automatically extended to all recipients of the covered
-work and works based on it.
-
- A patent license is "discriminatory" if it does not include within
-the scope of its coverage, prohibits the exercise of, or is
-conditioned on the non-exercise of one or more of the rights that are
-specifically granted under this License. You may not convey a covered
-work if you are a party to an arrangement with a third party that is
-in the business of distributing software, under which you make payment
-to the third party based on the extent of your activity of conveying
-the work, and under which the third party grants, to any of the
-parties who would receive the covered work from you, a discriminatory
-patent license (a) in connection with copies of the covered work
-conveyed by you (or copies made from those copies), or (b) primarily
-for and in connection with specific products or compilations that
-contain the covered work, unless you entered into that arrangement,
-or that patent license was granted, prior to 28 March 2007.
-
- Nothing in this License shall be construed as excluding or limiting
-any implied license or other defenses to infringement that may
-otherwise be available to you under applicable patent law.
-
- 12. No Surrender of Others' Freedom.
-
- If conditions are imposed on you (whether by court order, agreement or
-otherwise) that contradict the conditions of this License, they do not
-excuse you from the conditions of this License. If you cannot convey a
-covered work so as to satisfy simultaneously your obligations under this
-License and any other pertinent obligations, then as a consequence you may
-not convey it at all. For example, if you agree to terms that obligate you
-to collect a royalty for further conveying from those to whom you convey
-the Program, the only way you could satisfy both those terms and this
-License would be to refrain entirely from conveying the Program.
-
- 13. Use with the GNU Affero General Public License.
-
- Notwithstanding any other provision of this License, you have
-permission to link or combine any covered work with a work licensed
-under version 3 of the GNU Affero General Public License into a single
-combined work, and to convey the resulting work. The terms of this
-License will continue to apply to the part which is the covered work,
-but the special requirements of the GNU Affero General Public License,
-section 13, concerning interaction through a network will apply to the
-combination as such.
-
- 14. Revised Versions of this License.
-
- The Free Software Foundation may publish revised and/or new versions of
-the GNU General Public License from time to time. Such new versions will
-be similar in spirit to the present version, but may differ in detail to
-address new problems or concerns.
-
- Each version is given a distinguishing version number. If the
-Program specifies that a certain numbered version of the GNU General
-Public License "or any later version" applies to it, you have the
-option of following the terms and conditions either of that numbered
-version or of any later version published by the Free Software
-Foundation. If the Program does not specify a version number of the
-GNU General Public License, you may choose any version ever published
-by the Free Software Foundation.
-
- If the Program specifies that a proxy can decide which future
-versions of the GNU General Public License can be used, that proxy's
-public statement of acceptance of a version permanently authorizes you
-to choose that version for the Program.
-
- Later license versions may give you additional or different
-permissions. However, no additional obligations are imposed on any
-author or copyright holder as a result of your choosing to follow a
-later version.
-
- 15. Disclaimer of Warranty.
-
- THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
-APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
-HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
-OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
-THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
-PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
-IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
-ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
-
- 16. Limitation of Liability.
-
- IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
-WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
-THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
-GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
-USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
-DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
-PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
-EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
-SUCH DAMAGES.
-
- 17. Interpretation of Sections 15 and 16.
-
- If the disclaimer of warranty and limitation of liability provided
-above cannot be given local legal effect according to their terms,
-reviewing courts shall apply local law that most closely approximates
-an absolute waiver of all civil liability in connection with the
-Program, unless a warranty or assumption of liability accompanies a
-copy of the Program in return for a fee.
-
- END OF TERMS AND CONDITIONS
-
- How to Apply These Terms to Your New Programs
-
- If you develop a new program, and you want it to be of the greatest
-possible use to the public, the best way to achieve this is to make it
-free software which everyone can redistribute and change under these terms.
-
- To do so, attach the following notices to the program. It is safest
-to attach them to the start of each source file to most effectively
-state the exclusion of warranty; and each file should have at least
-the "copyright" line and a pointer to where the full notice is found.
-
-
- Copyright (C)
-
- This program is free software: you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation, either version 3 of the License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License
- along with this program. If not, see .
-
-Also add information on how to contact you by electronic and paper mail.
-
- If the program does terminal interaction, make it output a short
-notice like this when it starts in an interactive mode:
-
- Copyright (C)
- This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
- This is free software, and you are welcome to redistribute it
- under certain conditions; type `show c' for details.
-
-The hypothetical commands `show w' and `show c' should show the appropriate
-parts of the General Public License. Of course, your program's commands
-might be different; for a GUI interface, you would use an "about box".
-
- You should also get your employer (if you work as a programmer) or school,
-if any, to sign a "copyright disclaimer" for the program, if necessary.
-For more information on this, and how to apply and follow the GNU GPL, see
- .
-
- The GNU General Public License does not permit incorporating your program
-into proprietary programs. If your program is a subroutine library, you
-may consider it more useful to permit linking proprietary applications with
-the library. If this is what you want to do, use the GNU Lesser General
-Public License instead of this License. But first, please read
-.
+# Fair Core License, Version 1.0, MIT Future License
+
+## Abbreviation
+
+FCL-1.0-MIT
+
+## Notice
+
+Copyright 2025 Flipt Software, Inc.
+
+## Terms and Conditions
+
+### Licensor ("We")
+
+The party offering the Software under these Terms and Conditions.
+
+### The Software
+
+The "Software" is each version of the software that we make available under
+these Terms and Conditions, as indicated by our inclusion of these Terms and
+Conditions with the Software.
+
+### License Grant
+
+Subject to your compliance with this License Grant and the Limitations,
+Patents, Redistribution and Trademark clauses below, we hereby grant you the
+right to use, copy, modify, create derivative works, publicly perform, publicly
+display and redistribute the Software for any Permitted Purpose identified
+below.
+
+### Permitted Purpose
+
+A Permitted Purpose is any purpose other than a Competing Use. A Competing Use
+means making the Software available to others in a commercial product or
+service that:
+
+1. substitutes for the Software;
+
+2. substitutes for any other product or service we offer using the Software
+ that exists as of the date we make the Software available; or
+
+3. offers the same or substantially similar functionality as the Software.
+
+Permitted Purposes specifically include using the Software:
+
+1. for your internal use and access;
+
+2. for non-commercial education;
+
+3. for non-commercial research; and
+
+4. in connection with professional services that you provide to a licensee
+ using the Software in accordance with these Terms and Conditions.
+
+### Limitations
+
+You must not move, change, disable, or circumvent the license key functionality
+in the Software; or modify any portion of the Software protected by the license
+key to:
+
+1. enable access to the protected functionality without a valid license key; or
+
+2. remove the protected functionality.
+
+### Patents
+
+To the extent your use for a Permitted Purpose would necessarily infringe our
+patents, the license grant above includes a license under our patents. If you
+make a claim against any party that the Software infringes or contributes to
+the infringement of any patent, then your patent license to the Software ends
+immediately.
+
+### Redistribution
+
+The Terms and Conditions apply to all copies, modifications and derivatives of
+the Software.
+
+If you redistribute any copies, modifications or derivatives of the Software,
+you must include a copy of or a link to these Terms and Conditions and not
+remove any copyright or other proprietary notices provided in or with the
+Software.
+
+### Disclaimer
+
+THE SOFTWARE IS PROVIDED "AS IS" AND WITHOUT WARRANTIES OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING WITHOUT LIMITATION WARRANTIES OF FITNESS FOR A PARTICULAR
+PURPOSE, MERCHANTABILITY, TITLE OR NON-INFRINGEMENT.
+
+IN NO EVENT WILL WE HAVE ANY LIABILITY TO YOU ARISING OUT OF OR RELATED TO THE
+SOFTWARE, INCLUDING INDIRECT, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES,
+EVEN IF WE HAVE BEEN INFORMED OF THEIR POSSIBILITY IN ADVANCE.
+
+In the event the provision of this Disclaimer section is unenforceable under
+applicable law, the licenses granted herein are void.
+
+### Trademarks
+
+Except for displaying the License Details and identifying us as the origin of
+the Software, you have no right under these Terms and Conditions to use our
+trademarks, trade names, service marks or product names.
+
+## Grant of Future License
+
+We hereby irrevocably grant you an additional license to use the Software,
+under the MIT license, that is effective on the second anniversary of the date
+we make the Software available. On or after that date, you may use the Software
+under the MIT license, in which case the following will apply:
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is furnished to do
+so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/PLAN.md b/PLAN.md
new file mode 100644
index 0000000000..6ff27668f0
--- /dev/null
+++ b/PLAN.md
@@ -0,0 +1,47 @@
+## Development Plan
+
+This document is a high-level plan for the development of Flipt v2. This is a working document and will be updated as we progress.
+
+Please note that this is a high-level plan and will be refined as we progress.
+
+If you'd like to discuss this plan or add any additional ideas, please open an issue and tag it with `v2`.
+
+### Goals
+
+- Remove database dependencies for storing flag state and instead use a declarative storage backend
+- Support GitOps workflows completely, including write operations
+- Maintain compatibility with the current Flipt Evaluation APIs
+- Consolidate some configuration options and remove some that are no longer needed
+- Support authentication backends such as in-memory and Redis only
+- Create new declarative API for managing flag configuration
+- Remove legacy evaluation APIs
+- Make UI improvements where needed
+- Tackle any existing v1 issues that could be resolved by v2
+- (Optional) Support write operations to object storage backends
+- (Optional) Support approval git-based workflows
+
+### Non-Goals
+
+- Maintain compatibility with the current Flipt Management APIs
+- Maintain backward compatibility with configuration files from v1
+- Change v1 base types (flags, segments, etc) as this would require new evaluation APIs
+
+## TODO
+
+- [ ] Implement new declarative API for managing flag configuration
+- [ ] Update UI to support new API
+- [ ] Remove legacy evaluation APIs
+- [ ] Remove database dependencies except for analytics
+- [ ] Implement Redis and in-memory authentication backends
+- [ ] Refactor and consolidate configuration options
+- [ ] Fix and improve unit test coverage
+- [ ] Fix and improve integration test coverage
+- [ ] Package and release
+ - [ ] Binary
+ - [ ] Docker image
+ - [ ] Helm chart
+ - [ ] Homebrew tap
+- [ ] Documentation
+ - [ ] Create v2 docs site
+ - [ ] Migrate applicable docs from v1
+- [ ] Update examples
diff --git a/README.md b/README.md
index d7033b40f1..45c7dc2f4b 100644
--- a/README.md
+++ b/README.md
@@ -4,7 +4,7 @@
-An enterprise-ready, GitOps enabled, CloudNative, feature management solution
+An enterprise-ready, GitOps and CloudNative feature management solution
@@ -33,12 +33,6 @@
-
-
-
-
-
-
@@ -52,6 +46,11 @@
+> [!IMPORTANT]
+> This branch is a work in progress for a v2 version of Flipt. This is not a stable release and should not be used in production. The v2 branch is a major refactor of the codebase with the goal of supporting Git and object storage as the primary storage backends. See [PLAN.md](PLAN.md) for more information.
+
+
+
[Flipt](https://www.flipt.io) enables you to follow DevOps best practices and separate releases from deployments. Built with high-performance engineering organizations in mind.
Flipt can be deployed within your existing infrastructure so that you don't have to worry about your information being sent to a third party or the latency required to communicate across the internet.
@@ -84,7 +83,7 @@ Flipt supports use cases such as:
- ๐๏ธ **Control** - No data leaves your servers and you don't have to open your systems to the outside world to communicate with Flipt. It all runs within your existing infrastructure.
- ๐ **Speed** - Since Flipt is co-located with your existing services, you do not have to communicate across the internet which can add excessive latency and slow down your applications.
- โ
**Simplicity** - Flipt is a single binary with no external dependencies by default.
-- ๐ **Compatibility** - GRPC, REST, MySQL, Postgres, CockroachDB, SQLite, LibSQL, Redis, ClickHouse, Prometheus, OpenTelemetry, and more.
+- ๐ **Compatibility** - GRPC, REST, MySQL, Postgres, SQLite, Redis, ClickHouse, Prometheus, OpenTelemetry, and more.
@@ -93,12 +92,11 @@ Flipt supports use cases such as:
- Stand-alone, single binary that's easy to run and [configure](https://www.flipt.io/docs/configuration/overview)
- Ability to create advanced distribution rules to target segments of users
- Modern UI and debug console with dark mode ๐
-- Import and export to allow storing your data as code
- Works with [Prometheus](https://prometheus.io/) and [OpenTelemetry](https://opentelemetry.io/) out of the box ๐
- CloudNative [Filesystem, Object, Git, and OCI declarative storage backends](https://www.flipt.io/docs/configuration/storage#declarative) to support GitOps workflows and more.
- Audit logging with Webhook support to track changes to your data
-Are we missing a feature that you'd like to see? [Let us know!](https://features.flipt.io)
+Are we missing a feature that you'd like to see? [Let us know by opening an issue!](https://github.com/flipt-io/flipt/issues)
@@ -108,8 +106,6 @@ We would love your help! Before submitting a PR, please read over the [Contribut
No contribution is too small, whether it be bug reports/fixes, feature requests, documentation updates, or anything else that can help drive the project forward.
-Check out our [public roadmap](https://github.com/orgs/flipt-io/projects/4) to see what we're working on and where you can help.
-
Not sure how to get started? You can:
- [Book a pairing session/code walkthrough](https://calendly.com/flipt-mark/30) with one of our teammates!
@@ -120,13 +116,6 @@ Not sure how to get started? You can:
- [Backend](https://github.com/flipt-io/flipt/issues?q=is%3Aissue+is%3Aopen+label%3Ago)
- [Frontend](https://github.com/flipt-io/flipt/issues?q=is%3Aopen+is%3Aissue+label%3Aui)
-- Looking for issues by effort? We've got you covered:
- - [XS](https://github.com/flipt-io/flipt/issues?q=is%3Aissue+is%3Aopen+label%3Axs)
- - [Small](https://github.com/flipt-io/flipt/issues?q=is%3Aissue+is%3Aopen+label%3Asm)
- - [Medium](https://github.com/flipt-io/flipt/issues?q=is%3Aissue+is%3Aopen+label%3Amd)
- - [Large](https://github.com/flipt-io/flipt/issues?q=is%3Aissue+is%3Aopen+label%3Alg)
- - [XL](https://github.com/flipt-io/flipt/issues?q=is%3Aissue+is%3Aopen+label%3Axl)
-
Review the [Architecture](ARCHITECTURE.md) and [Development](DEVELOPMENT.md) documentation for more information on how Flipt works.
@@ -137,7 +126,7 @@ For help and discussion around Flipt, feature flag best practices, and more, joi
-## Try It
+
## Supports
-
-
-
-
-
-
-
@@ -307,103 +290,6 @@ The client code is the code that you would integrate into your applications, whi
### Server License
-The server code is licensed under the [GPL 3.0 License](https://spdx.org/licenses/GPL-3.0.html).
-
-See [LICENSE](LICENSE).
-
-
+The server code is licensed under the [Fair Core License, Version 1.0, MIT Future License](https://github.com/flipt-io/flipt/blob/main/LICENSE).
-## Contributors โจ
-
-Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/docs/en/emoji-key)):
-
-
-
-
-
-
-
-
-
-
-
-This project follows the [all-contributors](https://github.com/all-contributors/all-contributors) specification. Contributions of any kind welcome!
+See [fcl.dev](https://fcl.dev) for more information.
\ No newline at end of file
diff --git a/build/testing/integration.go b/build/testing/integration.go
index bb8ef9c0e7..18f5c6029a 100644
--- a/build/testing/integration.go
+++ b/build/testing/integration.go
@@ -51,11 +51,6 @@ var (
// AllCases are the top-level filterable integration test cases.
AllCases = map[string]testCaseFn{
- "api/sqlite": withSQLite(api),
- "api/libsql": withLibSQL(api),
- "api/postgres": withPostgres(api),
- "api/mysql": withMySQL(api),
- "api/cockroach": withCockroach(api),
"api/cache": cache,
"api/cachetls": cacheWithTLS,
"api/snapshot": withAuthz(snapshot),
@@ -292,71 +287,6 @@ func ofrep(ctx context.Context, _ *dagger.Client, base, flipt *dagger.Container,
return suite(ctx, "ofrep", base, flipt.WithExec(nil), conf)
}
-func withSQLite(fn testCaseFn) testCaseFn {
- return fn
-}
-
-func withLibSQL(fn testCaseFn) testCaseFn {
- return func(ctx context.Context, client *dagger.Client, base, flipt *dagger.Container, conf testConfig) func() error {
- return fn(ctx, client, base, flipt.WithEnvVariable("FLIPT_DB_URL", "libsql://file:/etc/flipt/flipt.db"), conf)
- }
-}
-
-func withPostgres(fn testCaseFn) testCaseFn {
- return func(ctx context.Context, client *dagger.Client, base, flipt *dagger.Container, conf testConfig) func() error {
- return fn(ctx, client, base, flipt.
- WithEnvVariable("FLIPT_DB_URL", "postgres://postgres:password@postgres:5432?sslmode=disable").
- WithServiceBinding("postgres", client.Container().
- From("postgres:alpine").
- WithEnvVariable("POSTGRES_PASSWORD", "password").
- WithExposedPort(5432).
- WithEnvVariable("UNIQUE", uuid.New().String()).
- AsService()),
- conf,
- )
- }
-}
-
-func withMySQL(fn testCaseFn) testCaseFn {
- return func(ctx context.Context, client *dagger.Client, base, flipt *dagger.Container, conf testConfig) func() error {
- return fn(ctx, client, base, flipt.
- WithEnvVariable(
- "FLIPT_DB_URL",
- "mysql://flipt:password@mysql:3306/flipt_test?multiStatements=true",
- ).
- WithServiceBinding("mysql", client.Container().
- From("mysql:8").
- WithEnvVariable("MYSQL_USER", "flipt").
- WithEnvVariable("MYSQL_PASSWORD", "password").
- WithEnvVariable("MYSQL_DATABASE", "flipt_test").
- WithEnvVariable("MYSQL_ALLOW_EMPTY_PASSWORD", "true").
- WithEnvVariable("UNIQUE", uuid.New().String()).
- WithExposedPort(3306).
- AsService()),
- conf,
- )
- }
-}
-
-func withCockroach(fn testCaseFn) testCaseFn {
- return func(ctx context.Context, client *dagger.Client, base, flipt *dagger.Container, conf testConfig) func() error {
- return fn(ctx, client, base, flipt.
- WithEnvVariable("FLIPT_DB_URL", "cockroachdb://root@cockroach:26257/defaultdb?sslmode=disable").
- WithServiceBinding("cockroach", client.Container().
- From("cockroachdb/cockroach:latest-v21.2").
- WithEnvVariable("COCKROACH_USER", "root").
- WithEnvVariable("COCKROACH_DATABASE", "defaultdb").
- WithEnvVariable("UNIQUE", uuid.New().String()).
- WithExposedPort(26257).
- WithExec(
- []string{"start-single-node", "--single-node", "--insecure", "--store=type=mem,size=0.7Gb", "--accept-sql-without-tls", "--logtostderr=ERROR"},
- dagger.ContainerWithExecOpts{UseEntrypoint: true}).
- AsService()),
- conf,
- )
- }
-}
-
func cache(ctx context.Context, _ *dagger.Client, base, flipt *dagger.Container, conf testConfig) func() error {
flipt = flipt.
WithEnvVariable("FLIPT_LOG_LEVEL", "WARN").
diff --git a/cmd/flipt/bundle.go b/cmd/flipt/bundle.go
deleted file mode 100644
index eb2464f325..0000000000
--- a/cmd/flipt/bundle.go
+++ /dev/null
@@ -1,199 +0,0 @@
-package main
-
-import (
- "context"
- "fmt"
- "os"
- "text/tabwriter"
-
- "oras.land/oras-go/v2"
-
- "github.com/spf13/cobra"
- "go.flipt.io/flipt/internal/config"
- "go.flipt.io/flipt/internal/containers"
- "go.flipt.io/flipt/internal/oci"
-)
-
-type bundleCommand struct{}
-
-func newBundleCommand() *cobra.Command {
- bundle := &bundleCommand{}
-
- cmd := &cobra.Command{
- Use: "bundle",
- Short: "Manage Flipt bundles",
- }
-
- cmd.AddCommand(&cobra.Command{
- Use: "build [flags] ",
- Short: "Build a bundle",
- RunE: bundle.build,
- Args: cobra.ExactArgs(1),
- })
-
- cmd.AddCommand(&cobra.Command{
- Use: "list [flags]",
- Short: "List all bundles",
- RunE: bundle.list,
- })
-
- cmd.AddCommand(&cobra.Command{
- Use: "push [flags] ",
- Short: "Push local bundle to remote",
- RunE: bundle.push,
- Args: cobra.ExactArgs(2),
- })
-
- cmd.AddCommand(&cobra.Command{
- Use: "pull [flags] ",
- Short: "Pull a remote bundle",
- RunE: bundle.pull,
- Args: cobra.ExactArgs(1),
- })
-
- return cmd
-}
-
-func (c *bundleCommand) build(cmd *cobra.Command, args []string) error {
- ctx := cmd.Context()
- store, err := c.getStore(ctx)
- if err != nil {
- return err
- }
-
- ref, err := oci.ParseReference(args[0])
- if err != nil {
- return err
- }
-
- bundle, err := store.Build(cmd.Context(), os.DirFS("."), ref)
- if err != nil {
- return err
- }
-
- fmt.Println(bundle.Digest)
-
- return nil
-}
-
-func (c *bundleCommand) list(cmd *cobra.Command, args []string) error {
- ctx := cmd.Context()
- store, err := c.getStore(ctx)
- if err != nil {
- return err
- }
-
- bundles, err := store.List(cmd.Context())
- if err != nil {
- return err
- }
-
- wr := writer()
-
- fmt.Fprintf(wr, "DIGEST\tREPO\tTAG\tCREATED\t\n")
- for _, bundle := range bundles {
- fmt.Fprintf(wr, "%s\t%s\t%s\t%s\t\n", bundle.Digest.Hex()[:7], bundle.Repository, bundle.Tag, bundle.CreatedAt)
- }
-
- return wr.Flush()
-}
-
-func (c *bundleCommand) push(cmd *cobra.Command, args []string) error {
- ctx := cmd.Context()
- store, err := c.getStore(ctx)
- if err != nil {
- return err
- }
-
- src, err := oci.ParseReference(args[0])
- if err != nil {
- return err
- }
-
- dst, err := oci.ParseReference(args[1])
- if err != nil {
- return err
- }
-
- bundle, err := store.Copy(cmd.Context(), src, dst)
- if err != nil {
- return err
- }
-
- fmt.Println(bundle.Digest)
-
- return nil
-}
-
-func (c *bundleCommand) pull(cmd *cobra.Command, args []string) error {
- ctx := cmd.Context()
- store, err := c.getStore(ctx)
- if err != nil {
- return err
- }
-
- src, err := oci.ParseReference(args[0])
- if err != nil {
- return err
- }
-
- // copy source into destination and rewrite
- // to reference the local equivalent name
- dst := src
- dst.Registry = "local"
- dst.Scheme = "flipt"
-
- bundle, err := store.Copy(cmd.Context(), src, dst)
- if err != nil {
- return err
- }
-
- fmt.Println(bundle.Digest)
-
- return nil
-}
-
-func (c *bundleCommand) getStore(ctx context.Context) (*oci.Store, error) {
- logger, cfg, err := buildConfig(ctx)
- if err != nil {
- return nil, err
- }
-
- dir, err := config.DefaultBundleDir()
- if err != nil {
- return nil, err
- }
-
- var opts []containers.Option[oci.StoreOptions]
- if cfg := cfg.Storage.OCI; cfg != nil {
- if cfg.Authentication != nil {
- if !cfg.Authentication.Type.IsValid() {
- cfg.Authentication.Type = oci.AuthenticationTypeStatic
- }
- opt, err := oci.WithCredentials(
- cfg.Authentication.Type,
- cfg.Authentication.Username,
- cfg.Authentication.Password,
- )
- if err != nil {
- return nil, err
- }
- opts = append(opts, opt)
- }
-
- // The default is the 1.1 version, this is why we don't need to check it in here.
- if cfg.ManifestVersion == config.OCIManifestVersion10 {
- opts = append(opts, oci.WithManifestVersion(oras.PackManifestVersion1_0))
- }
-
- if cfg.BundlesDirectory != "" {
- dir = cfg.BundlesDirectory
- }
- }
-
- return oci.NewStore(logger, dir, opts...)
-}
-
-func writer() *tabwriter.Writer {
- return tabwriter.NewWriter(os.Stdout, 0, 0, 3, ' ', 0)
-}
diff --git a/cmd/flipt/export.go b/cmd/flipt/export.go
deleted file mode 100644
index 94170a542c..0000000000
--- a/cmd/flipt/export.go
+++ /dev/null
@@ -1,151 +0,0 @@
-package main
-
-import (
- "context"
- "fmt"
- "io"
- "os"
- "path/filepath"
- "time"
-
- "github.com/spf13/cobra"
- "go.flipt.io/flipt/internal/ext"
- "go.flipt.io/flipt/rpc/flipt"
-)
-
-type exportCommand struct {
- filename string
- address string
- token string
- namespaces string // comma delimited list of namespaces
- allNamespaces bool
- sortByKey bool
-}
-
-func newExportCommand() *cobra.Command {
- export := &exportCommand{}
-
- cmd := &cobra.Command{
- Use: "export",
- Short: "Export Flipt data to file/stdout",
- RunE: export.run,
- }
-
- cmd.Flags().StringVarP(
- &export.filename,
- "output", "o",
- "",
- "export to filename (default STDOUT)",
- )
-
- cmd.Flags().StringVarP(
- &export.address,
- "address", "a",
- "",
- "address of Flipt instance (defaults to direct DB export if not supplied).",
- )
-
- cmd.Flags().StringVarP(
- &export.token,
- "token", "t",
- "",
- "client token used to authenticate access to Flipt instance.",
- )
-
- cmd.Flags().StringVarP(
- &export.namespaces,
- "namespace", "n",
- flipt.DefaultNamespace,
- "source namespace for exported resources.",
- )
-
- cmd.Flags().StringVar(
- &export.namespaces,
- "namespaces",
- flipt.DefaultNamespace,
- "comma-delimited list of namespaces to export from. (mutually exclusive with --all-namespaces)",
- )
-
- cmd.Flags().BoolVar(
- &export.allNamespaces,
- "all-namespaces",
- false,
- "export all namespaces. (mutually exclusive with --namespaces)",
- )
-
- cmd.Flags().BoolVar(
- &export.sortByKey,
- "sort-by-key",
- false,
- "sort exported resources by key",
- )
-
- cmd.Flags().StringVar(&providedConfigFile, "config", "", "path to config file")
-
- cmd.MarkFlagsMutuallyExclusive("all-namespaces", "namespaces", "namespace")
-
- // We can ignore the error here since "namespace" will be a flag that exists.
- _ = cmd.Flags().MarkDeprecated("namespace", "please use namespaces instead")
-
- return cmd
-}
-
-func (c *exportCommand) run(cmd *cobra.Command, _ []string) error {
- var (
- ctx = cmd.Context()
- // default to stdout
- out io.Writer = os.Stdout
- enc = ext.EncodingYML
- )
-
- // export to file
- if c.filename != "" {
- fi, err := os.Create(c.filename)
- if err != nil {
- return fmt.Errorf("creating output file: %w", err)
- }
-
- defer fi.Close()
-
- fmt.Fprintf(fi, "# exported by Flipt (%s) on %s\n\n", version, time.Now().UTC().Format(time.RFC3339))
-
- out = fi
-
- if extn := filepath.Ext(c.filename); len(extn) > 0 {
- // strip off the leading .
- enc = ext.Encoding(extn[1:])
- }
- }
-
- // Use client when remote address is configured.
- if c.address != "" {
- client, err := fliptClient(c.address, c.token)
- if err != nil {
- return err
- }
- return c.export(ctx, enc, out, client)
- }
-
- // Otherwise, go direct to the DB using Flipt configuration file.
- logger, cfg, err := buildConfig(ctx)
- if err != nil {
- return err
- }
-
- defer func() {
- _ = logger.Sync()
- }()
-
- server, cleanup, err := fliptServer(logger, cfg)
- if err != nil {
- return err
- }
-
- defer cleanup()
-
- return c.export(ctx, enc, out, server)
-}
-
-func (c *exportCommand) export(ctx context.Context, enc ext.Encoding, dst io.Writer, lister ext.Lister) error {
- return ext.NewExporter(lister, c.namespaces, c.allNamespaces, c.sortByKey).Export(ctx, enc, dst)
-}
diff --git a/cmd/flipt/import.go b/cmd/flipt/import.go
deleted file mode 100644
index f0f9f3aeed..0000000000
--- a/cmd/flipt/import.go
+++ /dev/null
@@ -1,195 +0,0 @@
-package main
-
-import (
- "context"
- "errors"
- "fmt"
- "io"
- "os"
- "path/filepath"
-
- "github.com/spf13/cobra"
- "go.flipt.io/flipt/internal/ext"
- "go.flipt.io/flipt/internal/storage/sql"
- "go.flipt.io/flipt/rpc/flipt"
- sdk "go.flipt.io/flipt/sdk/go"
-)
-
-type importCommand struct {
- dropBeforeImport bool
- skipExisting bool
- importStdin bool
- address string
- token string
-}
-
-func newImportCommand() *cobra.Command {
- importCmd := &importCommand{}
-
- cmd := &cobra.Command{
- Use: "import",
- Short: "Import Flipt data from file/stdin",
- RunE: importCmd.run,
- }
-
- cmd.Flags().BoolVar(
- &importCmd.dropBeforeImport,
- "drop",
- false,
- "drop database before import",
- )
- cmd.Flags().BoolVar(
- &importCmd.skipExisting,
- "skip-existing",
- false,
- "only import new data",
- )
-
- cmd.Flags().BoolVar(
- &importCmd.importStdin,
- "stdin",
- false,
- "import from STDIN",
- )
-
- cmd.Flags().StringVarP(
- &importCmd.address,
- "address", "a",
- "",
- "address of Flipt instance (defaults to direct DB import if not supplied).",
- )
-
- cmd.Flags().StringVarP(
- &importCmd.token,
- "token", "t",
- "",
- "client token used to authenticate access to Flipt instance.",
- )
-
- cmd.Flags().StringVar(&providedConfigFile, "config", "", "path to config file")
- return cmd
-}
-
-func (c *importCommand) run(cmd *cobra.Command, args []string) error {
- var (
- ctx = cmd.Context()
- in io.Reader = os.Stdin
- enc = ext.EncodingYML
- )
-
- if !c.importStdin {
- if len(args) < 1 {
- return errors.New("import filename required")
- }
-
- importFilename := args[0]
- if importFilename == "" {
- return errors.New("import filename required")
- }
-
- f := filepath.Clean(importFilename)
-
- fi, err := os.Open(f)
- if err != nil {
- return fmt.Errorf("opening import file: %w", err)
- }
-
- defer fi.Close()
-
- in = fi
-
- if extn := filepath.Ext(importFilename); len(extn) > 0 {
- // strip off leading .
- enc = ext.Encoding(extn[1:])
- }
- }
-
- // Use client when remote address is configured.
- if c.address != "" {
- client, err := fliptClient(c.address, c.token)
- if err != nil {
- return err
- }
- if c.dropBeforeImport {
- if err := dropNamespaces(ctx, client); err != nil {
- return fmt.Errorf("dropping namespaces: %w", err)
- }
- }
- return ext.NewImporter(client).Import(ctx, enc, in, c.skipExisting)
- }
-
- logger, cfg, err := buildConfig(ctx)
- if err != nil {
- return err
- }
-
- defer func() {
- _ = logger.Sync()
- }()
-
- // drop tables if specified
- if c.dropBeforeImport {
-
- migrator, err := sql.NewMigrator(*cfg, logger)
- if err != nil {
- return err
- }
-
- if err := migrator.Drop(); err != nil {
- return fmt.Errorf("attempting to drop: %w", err)
- }
-
- if _, err := migrator.Close(); err != nil {
- return fmt.Errorf("closing migrator: %w", err)
- }
- }
-
- migrator, err := sql.NewMigrator(*cfg, logger)
- if err != nil {
- return err
- }
-
- if err := migrator.Up(forceMigrate); err != nil {
- return err
- }
-
- if _, err := migrator.Close(); err != nil {
- return fmt.Errorf("closing migrator: %w", err)
- }
-
- // Otherwise, go direct to the DB using Flipt configuration file.
- server, cleanup, err := fliptServer(logger, cfg)
- if err != nil {
- return err
- }
-
- defer cleanup()
-
- return ext.NewImporter(
- server,
- ).Import(ctx, enc, in, c.skipExisting)
-}
-
-func dropNamespaces(ctx context.Context, client *sdk.Flipt) error {
- namespaces, err := client.ListNamespaces(ctx, &flipt.ListNamespaceRequest{})
- if err != nil {
- return err
- }
-
- for _, ns := range namespaces.Namespaces {
- if err := client.DeleteNamespace(ctx, &flipt.DeleteNamespaceRequest{
- Key: ns.Key,
- Force: true,
- }); err != nil {
- return err
- }
- }
-
- _, err = client.CreateNamespace(ctx, &flipt.CreateNamespaceRequest{
- Key: "default",
- Name: "Default",
- Description: "Default namespace",
- })
-
- return err
-}
diff --git a/cmd/flipt/main.go b/cmd/flipt/main.go
index c1bd60285e..663021d647 100644
--- a/cmd/flipt/main.go
+++ b/cmd/flipt/main.go
@@ -141,13 +141,10 @@ func exec() error {
_ = rootCmd.Flags().MarkHidden("force-migrate")
rootCmd.AddCommand(newMigrateCommand())
- rootCmd.AddCommand(newExportCommand())
- rootCmd.AddCommand(newImportCommand())
rootCmd.AddCommand(newValidateCommand())
rootCmd.AddCommand(newConfigCommand())
rootCmd.AddCommand(newCompletionCommand())
rootCmd.AddCommand(newDocCommand())
- rootCmd.AddCommand(newBundleCommand())
rootCmd.AddCommand(newEvaluateCommand())
ctx, cancel := context.WithCancel(context.Background())
diff --git a/cmd/flipt/migrate.go b/cmd/flipt/migrate.go
index 381642574a..852b7612f9 100644
--- a/cmd/flipt/migrate.go
+++ b/cmd/flipt/migrate.go
@@ -10,8 +10,7 @@ import (
)
const (
- defaultConfig = "default"
- analytics = "analytics"
+ analytics = "analytics"
)
var database string
@@ -22,16 +21,13 @@ func runMigrations(cfg *config.Config, logger *zap.Logger, database string) erro
err error
)
- if database == analytics {
- migrator, err = sql.NewAnalyticsMigrator(*cfg, logger)
- if err != nil {
- return err
- }
- } else {
- migrator, err = sql.NewMigrator(*cfg, logger)
- if err != nil {
- return err
- }
+ if database != analytics {
+ return fmt.Errorf("database %s not supported", database)
+ }
+
+ migrator, err = sql.NewAnalyticsMigrator(*cfg, logger)
+ if err != nil {
+ return err
}
defer migrator.Close()
@@ -58,23 +54,15 @@ func newMigrateCommand() *cobra.Command {
_ = logger.Sync()
}()
- // Run the OLTP and OLAP database migrations sequentially because of
- // potential danger in DB migrations in general.
- if err := runMigrations(cfg, logger, defaultConfig); err != nil {
+ if err := runMigrations(cfg, logger, database); err != nil {
return err
}
- if database == analytics {
- if err := runMigrations(cfg, logger, analytics); err != nil {
- return err
- }
- }
-
return nil
},
}
cmd.Flags().StringVar(&providedConfigFile, "config", "", "path to config file")
- cmd.Flags().StringVar(&database, "database", "default", "string to denote which database type to migrate")
+ cmd.Flags().StringVar(&database, "database", "analytics", "string to denote which database type to migrate")
return cmd
}
diff --git a/cmd/flipt/server.go b/cmd/flipt/server.go
index 65222081a5..55407d839c 100644
--- a/cmd/flipt/server.go
+++ b/cmd/flipt/server.go
@@ -4,45 +4,13 @@ import (
"fmt"
"net/url"
- "go.flipt.io/flipt/internal/config"
- "go.flipt.io/flipt/internal/server"
- "go.flipt.io/flipt/internal/storage"
- "go.flipt.io/flipt/internal/storage/sql"
- "go.flipt.io/flipt/internal/storage/sql/mysql"
- "go.flipt.io/flipt/internal/storage/sql/postgres"
- "go.flipt.io/flipt/internal/storage/sql/sqlite"
sdk "go.flipt.io/flipt/sdk/go"
sdkgrpc "go.flipt.io/flipt/sdk/go/grpc"
sdkhttp "go.flipt.io/flipt/sdk/go/http"
- "go.uber.org/zap"
"google.golang.org/grpc"
"google.golang.org/grpc/credentials/insecure"
)
-func fliptServer(logger *zap.Logger, cfg *config.Config) (*server.Server, func(), error) {
- db, driver, err := sql.Open(*cfg)
- if err != nil {
- return nil, nil, fmt.Errorf("opening db: %w", err)
- }
-
- logger.Debug("constructing builder", zap.Bool("prepared_statements", cfg.Database.PreparedStatementsEnabled))
-
- builder := sql.BuilderFor(db, driver, cfg.Database.PreparedStatementsEnabled)
-
- var store storage.Store
-
- switch driver {
- case sql.SQLite, sql.LibSQL:
- store = sqlite.NewStore(db, builder, logger)
- case sql.Postgres, sql.CockroachDB:
- store = postgres.NewStore(db, builder, logger)
- case sql.MySQL:
- store = mysql.NewStore(db, builder, logger)
- }
-
- return server.New(logger, store), func() { _ = db.Close() }, nil
-}
-
func fliptSDK(address, token string) (*sdk.SDK, error) {
addr, err := url.Parse(address)
if err != nil {
@@ -74,11 +42,3 @@ func fliptSDK(address, token string) (*sdk.SDK, error) {
s := sdk.New(transport, opts...)
return &s, nil
}
-
-func fliptClient(address, token string) (*sdk.Flipt, error) {
- s, err := fliptSDK(address, token)
- if err != nil {
- return nil, err
- }
- return s.Flipt(), nil
-}
diff --git a/config/default.yml b/config/default.yml
deleted file mode 100644
index 56389f0667..0000000000
--- a/config/default.yml
+++ /dev/null
@@ -1,49 +0,0 @@
-# yaml-language-server: $schema=https://raw.githubusercontent.com/flipt-io/flipt/main/config/flipt.schema.json
-
-# version: "1.0"
-# log:
-# level: INFO
-# file:
-# encoding: console
-# grpc_level: ERROR
-
-# ui:
-# enabled: true
-# default_theme: system
-
-# cors:
-# enabled: false
-# allowed_origins: "*"
-
-# cache:
-# enabled: false
-# backend: memory
-# ttl: 60s
-# redis:
-# host: localhost
-# port: 6379
-# memory:
-# eviction_interval: 5m # Evict Expired Items Every 5m
-
-# server:
-# protocol: http
-# host: 0.0.0.0
-# https_port: 443
-# http_port: 8080
-# grpc_port: 9000
-
-# db:
-# url: file:/var/opt/flipt/flipt.db
-# max_idle_conn: 2
-# max_open_conn: 0 # unlimited
-# conn_max_lifetime: 0 # unlimited
-
-# tracing:
-# enabled: false
-# exporter: jaeger
-# jaeger:
-# host: localhost
-# port: 6831
-
-# meta:
-# check_for_updates: true
diff --git a/config/flipt.schema.cue b/config/flipt.schema.cue
index d1cbf1fd87..bec8ada69b 100644
--- a/config/flipt.schema.cue
+++ b/config/flipt.schema.cue
@@ -1,26 +1,23 @@
package flipt
import "strings"
+
import "list"
#FliptSpec: {
- // flipt-schema-v1
+ // flipt-schema-v2
//
// Flipt config file is a YAML file defining how to configure the
// Flipt application.
@jsonschema(schema="http://json-schema.org/draft/2019-09/schema#")
- version?: "1.0" | *"1.0"
+ version: "2.0" | *"2.0"
experimental?: #experimental
- analytics: #analytics
- audit?: #audit
+ analytics?: #analytics
authentication?: #authentication
authorization?: #authorization
- cache?: #cache
- cloud?: #cloud
cors?: #cors
diagnostics?: #diagnostics
storage?: #storage
- db?: #db
log?: #log
meta?: #meta
server?: #server
@@ -36,6 +33,7 @@ import "list"
evaluation: bool | *false
ofrep: bool | *false
}
+
session?: {
domain?: string
secure?: bool
@@ -44,26 +42,54 @@ import "list"
csrf?: {
key: string
}
+
+ storage?: *{
+ type: "memory"
+ cleanup?: #authentication.#storage_cleanup
+ } | {
+ type: "redis"
+ cleanup?: #authentication.#storage_cleanup
+ connection: {
+ host?: string | *"localhost"
+ port?: int | *6379
+ require_tls?: bool | *false
+ db?: int | *0
+ username?: string
+ password?: string
+ pool_size?: int | *0
+ min_idle_conn?: int | *0
+ conn_max_idle_time?: =~#duration | int | *0
+ net_timeout?: =~#duration | int | *0
+ ca_cert_path?: string
+ ca_cert_bytes?: string
+ insecure_skip_tls?: bool | *false
+ }
+ }
+
+ #storage_cleanup: {
+ @jsonschema(id="storage_cleanup")
+ interval?: =~#duration | int | *"1h"
+ grace_period?: =~#duration | int | *"30m"
+ }
}
methods?: {
token?: {
enabled?: bool | *false
- cleanup?: #authentication.#authentication_cleanup
- bootstrap?: {
- token?: string
- expiration: =~#duration | int
- metadata?: [string]: string
+ storage?: {
+ type: "static"
+ tokens: [
+ ...{
+ name: string
+ credential: string
+ metadata: [string]: string
+ },
+ ]
}
}
- cloud?: {
- enabled?: bool | *false
- }
-
oidc?: {
enabled?: bool | *false
- cleanup?: #authentication.#authentication_cleanup
providers?: {
{[=~"^.*$" & !~"^()$"]: #authentication.#authentication_oidc_provider}
}
@@ -75,7 +101,6 @@ import "list"
discovery_url: string
ca_path: string
service_account_token_path: string
- cleanup?: #authentication.#authentication_cleanup
}
github?: {
@@ -102,27 +127,22 @@ import "list"
}
}
- #authentication_cleanup: {
- @jsonschema(id="authentication_cleanup")
- interval?: =~#duration | int | *"1h"
- grace_period?: =~#duration | int | *"30m"
- }
-
#authentication_oidc_provider: {
@jsonschema(id="authentication_oidc_provider")
issuer_url?: string
client_id?: string
client_secret?: string
redirect_address?: string
- nonce?: string
+ nonce?: string
scopes?: [...string]
use_pkce?: bool
}
+
}
#authorization: {
required?: bool | *false
- backend: "local" | "object" | "bundle" | "cloud" | *""
+ backend: "local" | "bundle" | *""
local?: {
policy?: {
poll_interval: =~#duration | *"5m"
@@ -133,58 +153,9 @@ import "list"
path: string
}
}
- object?: {
- type: "s3" | *""
- s3?: {
- region: string
- bucket: string
- prefix?: string
- endpoint?: string
- }
- }
bundle?: {
configuration: string
}
- cloud?: {
- poll_interval: =~#duration | *"5m"
- }
- }
-
- #cache: {
- enabled?: bool | *false
- backend?: *"memory" | "redis"
- ttl?: =~#duration | int | *"60s"
-
- redis?: {
- host?: string | *"localhost"
- port?: int | *6379
- require_tls?: bool | *false
- db?: int | *0
- username?: string
- password?: string
- pool_size?: int | *0
- min_idle_conn?: int | *0
- conn_max_idle_time?: =~#duration | int | *0
- net_timeout?: =~#duration | int | *0
- ca_cert_path?: string
- ca_cert_bytes?: string
- insecure_skip_tls?: bool | *false
- }
-
- memory?: {
- enabled?: bool | *false
- eviction_interval?: =~#duration | int | *"5m"
- expiration?: =~#duration | int | *"60s"
- }
- }
-
- #cloud: {
- host?: string | *"flipt.cloud"
- organization?: string
- gateway?: string
- authentication?: {
- api_key?: string
- }
}
#cors: {
@@ -195,9 +166,6 @@ import "list"
"Authorization",
"Content-Type",
"X-CSRF-Token",
- "X-Fern-Language",
- "X-Fern-SDK-Name",
- "X-Fern-SDK-Version",
"X-Flipt-Namespace",
"X-Flipt-Accept-Server-Version",
]
@@ -209,93 +177,75 @@ import "list"
}
}
- #storage: {
- type: "database" | "git" | "local" | "object" | "oci" | *""
- read_only?: bool | *false
- local?: path: string | *"."
- git?: {
- repository: string
- backend?: {
- type: *"memory" | "local"
- path?: string
- }
- ref?: string | *"main"
- ref_type?: *"static" | "semver"
- directory?: string
- poll_interval?: =~#duration | *"30s"
- ca_cert_path?: string
- ca_cert_bytes?: string
- insecure_skip_tls?: bool | *false
- authentication?: ({
- basic: {
- username: string
- password: string
- }
- } | {
- token: access_token: string
- } | {
- ssh: {
- user?: string | *"git"
- password: string
- private_key_path: string
- }
- } | {
- ssh: {
- user?: string | *"git"
- password: string
- private_key_bytes: string
- }
- })
+ #environments: [Name=string]: {
+ default: bool | *false
+ storage: string
+ directory: string | *""
+ }
+
+ #storage: [Name=string]: {
+ // remote is the target upstream remote.
+ // configuring this property ensures that changes (writes)
+ // to flipt metadata configuration are pushed to the upstream
+ // before returning the response to the caller.
+ remote?: string
+ // backend configures whether or not the target repository is
+ // managed entirely in-memory or on the local disk.
+ backend?: {
+ type: *"memory" | "local"
+ path?: string
}
- object?: {
- type: "s3" | "azblob" | "googlecloud" | *""
- s3?: {
- region: string
- bucket: string
- prefix?: string
- endpoint?: string
- poll_interval?: =~#duration | *"1m"
+ branch?: string | *"main"
+ poll_interval?: =~#duration | *"30s"
+ ca_cert_path?: string
+ ca_cert_bytes?: string
+ insecure_skip_tls?: bool | *false
+ authentication?: ({
+ basic: {
+ username: string
+ password: string
}
- azblob?: {
- container: string
- endpoint?: string
- poll_interval?: =~#duration | *"1m"
+ } | {
+ token: access_token: string
+ } | {
+ ssh: {
+ user?: string | *"git"
+ password: string
+ private_key_path: string
}
- googlecloud?: {
- bucket: string
- prefix?: string
- poll_interval?: =~#duration | *"1m"
+ } | {
+ ssh: {
+ user?: string | *"git"
+ password: string
+ private_key_bytes: string
}
- }
- oci?: {
- repository: string
- bundles_directory?: string
- authentication?: {
- type: "aws-ecr" | *"static"
- username: string
- password: string
+ })
+ // publishers configures destinations for the storage engine
+ // to publish when new version of state become available
+ publishers?: {
+ object?: {
+ type: "s3" | "azblob" | "googlecloud" | *""
+ s3?: {
+ region: string
+ bucket: string
+ prefix?: string
+ endpoint?: string
+ poll_interval?: =~#duration | *"1m"
+ }
+ azblob?: {
+ container: string
+ endpoint?: string
+ poll_interval?: =~#duration | *"1m"
+ }
+ googlecloud?: {
+ bucket: string
+ prefix?: string
+ poll_interval?: =~#duration | *"1m"
+ }
}
- poll_interval?: =~#duration | *"30s"
- manifest_version?: "1.0" | *"1.1"
}
}
- #db: {
- password?: string
- max_idle_conn?: int | *2
- max_open_conn?: int
- conn_max_lifetime?: =~#duration | int
- prepared_statements_enabled?: bool | *true
- } & ({
- url?: string | *"file:/var/opt/flipt/flipt.db"
- } | {
- protocol?: *"sqlite" | "cockroach" | "cockroachdb" | "file" | "mysql" | "postgres"
- host?: string
- port?: int
- name?: string
- user?: string
- })
-
_#lower: ["debug", "error", "fatal", "info", "panic", "warn"]
_#all: list.Concat([_#lower, [for x in _#lower {strings.ToUpper(x)}]])
#log: {
@@ -329,10 +279,6 @@ import "list"
grpc_conn_max_idle_time?: =~#duration
grpc_conn_max_age?: =~#duration
grpc_conn_max_age_grace?: =~#duration
- cloud?: {
- enabled?: bool | *false
- port?: int | *8443
- }
}
#metrics: {
@@ -347,22 +293,12 @@ import "list"
#tracing: {
enabled?: bool | *false
- exporter?: *"jaeger" | "zipkin" | "otlp"
+ exporter?: *"otlp"
sampling_ratio?: float & >=0 & <=1 | *1
propagators?: [
..."tracecontext" | "baggage" | "b3" | "b3multi" | "jaeger" | "xray" | "ottrace" | "none",
] | *["tracecontext", "baggage"]
- jaeger?: {
- enabled?: bool | *false
- host?: string | *"localhost"
- port?: int | *6831
- }
-
- zipkin?: {
- endpoint?: string | *"http://localhost:9411/api/v2/spans"
- }
-
otlp?: {
endpoint?: string | *"localhost:4317"
headers?: [string]: string
@@ -370,7 +306,6 @@ import "list"
}
#ui: {
- enabled?: bool | *true
default_theme?: "light" | "dark" | *"system"
topbar?: {
color?: string
@@ -378,50 +313,6 @@ import "list"
}
}
- #audit: {
- sinks?: {
- log?: {
- enabled?: bool | *false
- file?: string | *""
- encoding?: *"" | "json" | "console"
- }
- webhook?: {
- enabled?: bool | *false
- url?: string | *""
- max_backoff_duration?: =~#duration | *"15s"
- signing_secret?: string | *""
- templates?: [...{
- url: string
- body: string
- headers?: [string]: string
- }]
- }
- cloud?: {
- enabled?: bool | *false
- }
- kafka?: {
- enabled?: bool | *false
- topic: string
- bootstrap_servers: [...string]
- encoding?: *"protobuf" | "avro"
- schema_registry?: {
- url: string
- } | null
- require_tls?: bool | *false
- insecure_skip_tls?: bool | *false
- authentication?: {
- username: string
- password: string
- } | null
- }
- }
- buffer?: {
- capacity?: int | *2
- flush_period?: string | *"2m"
- }
- events?: [...string] | *["*:*"]
- }
-
#analytics: {
storage?: {
clickhouse?: {
@@ -440,11 +331,7 @@ import "list"
}
}
- #experimental: {
- cloud?: {
- enabled?: bool | *false
- }
- }
+ #experimental: {}
#duration: "^([0-9]+(ns|us|ยตs|ms|s|m|h))+$"
}
diff --git a/config/flipt.schema.json b/config/flipt.schema.json
index 22f40e6755..d07ea2b6f3 100644
--- a/config/flipt.schema.json
+++ b/config/flipt.schema.json
@@ -2,14 +2,14 @@
"$schema": "http://json-schema.org/draft/2019-09/schema#",
"id": "flipt.schema.json",
"type": "object",
- "title": "flipt-schema-v1",
+ "title": "flipt-schema-v2",
"description": "Flipt config file is a YAML file defining how to configure the Flipt application.",
"properties": {
"version": {
"type": "string",
- "enum": ["1.0"],
- "default": "1.0"
+ "enum": ["2.0"],
+ "default": "2.0"
},
"audit": {
"$ref": "#/definitions/audit"
@@ -23,15 +23,9 @@
"cache": {
"$ref": "#/definitions/cache"
},
- "cloud": {
- "$ref": "#/definitions/cloud"
- },
"cors": {
"$ref": "#/definitions/cors"
},
- "db": {
- "$ref": "#/definitions/db"
- },
"diagnostics": {
"$ref": "#/definitions/diagnostics"
},
@@ -136,15 +130,6 @@
"title": "Token",
"additionalProperties": false
},
- "cloud": {
- "type": "object",
- "properties": {
- "enabled": {
- "type": "boolean",
- "default": false
- }
- }
- },
"oidc": {
"type": "object",
"properties": {
@@ -569,31 +554,6 @@
"required": [],
"title": "Cache"
},
- "cloud": {
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "host": {
- "type": "string",
- "default": "flipt.cloud"
- },
- "organization": {
- "type": "string"
- },
- "gateway": {
- "type": "string"
- },
- "authentication": {
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "api_key": {
- "type": "string"
- }
- }
- }
- }
- },
"cors": {
"type": "object",
"additionalProperties": false,
@@ -613,9 +573,6 @@
"Authorization",
"Content-Type",
"X-CSRF-Token",
- "X-Fern-Language",
- "X-Fern-SDK-Name",
- "X-Fern-SDK-Version",
"X-Flipt-Namespace",
"X-Flipt-Accept-Server-Version"
]
@@ -642,65 +599,64 @@
"required": [],
"title": "Diagnostics"
},
- "db": {
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "url": {
- "type": "string",
- "default": "file:/var/opt/flipt/flipt.db"
- },
- "protocol": {
- "type": "string",
- "enum": [
- "cockroach",
- "cockroachdb",
- "file",
- "mysql",
- "postgres",
- "sqlite"
- ]
- },
- "host": {
- "type": "string"
- },
- "port": {
- "type": "integer"
- },
- "name": {
- "type": "string"
- },
- "user": {
- "type": "string"
- },
- "password": {
- "type": "string"
- },
- "max_idle_conn": {
- "type": "integer",
- "default": 2
- },
- "max_open_conn": {
- "type": "integer"
- },
- "conn_max_lifetime": {
- "oneOf": [{ "type": "integer" }, { "type": "string" }]
- },
- "prepared_statements_enabled": {
- "type": "boolean"
- }
- },
- "required": [],
- "title": "DB"
- },
+
"storage": {
"type": "object",
"additionalProperties": false,
"properties": {
+ "authentication": {
+ "type": "object",
+ "additionalProperties": false,
+ "properties": {
+ "url": {
+ "type": "string",
+ "default": "file:/var/opt/flipt/flipt.db"
+ },
+ "protocol": {
+ "type": "string",
+ "enum": [
+ "file",
+ "mysql",
+ "postgres",
+ "sqlite"
+ ]
+ },
+ "host": {
+ "type": "string"
+ },
+ "port": {
+ "type": "integer"
+ },
+ "name": {
+ "type": "string"
+ },
+ "user": {
+ "type": "string"
+ },
+ "password": {
+ "type": "string"
+ },
+ "max_idle_conn": {
+ "type": "integer",
+ "default": 2
+ },
+ "max_open_conn": {
+ "type": "integer"
+ },
+ "conn_max_lifetime": {
+ "oneOf": [{ "type": "integer" }, { "type": "string" }]
+ },
+ "prepared_statements_enabled": {
+ "type": "boolean"
+ }
+ },
+ "required": [],
+ "title": "Authentication"
+ },
"type": {
"type": "string",
- "enum": ["database", "git", "local", "object", "oci"],
- "default": "database"
+ "enum": ["git", "local", "object"],
+ "default": "local"
},
"read_only": {
"type": "boolean",
@@ -930,49 +886,6 @@
}
},
"title": "Object"
- },
- "oci": {
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "repository": {
- "type": "string"
- },
- "bundles_directory": {
- "type": "string"
- },
- "authentication": {
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "type": {
- "type": "string",
- "enum": ["static", "aws-ecr"],
- "default": "static"
- },
- "username": { "type": "string" },
- "password": { "type": "string" }
- }
- },
- "poll_interval": {
- "oneOf": [
- {
- "type": "string",
- "pattern": "^([0-9]+(ns|us|ยตs|ms|s|m|h))+$"
- },
- {
- "type": "integer"
- }
- ],
- "default": "1m"
- },
- "manifest_version": {
- "type": "string",
- "enum": ["1.0", "1.1"],
- "default": "1.1"
- }
- },
- "title": "OCI"
}
},
"required": [],
@@ -1106,20 +1019,6 @@
"grpc_conn_max_age_grace": {
"type": "string",
"pattern": "^([0-9]+(ns|us|ยตs|ms|s|m|h))+$"
- },
- "cloud": {
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "enabled": {
- "type": "boolean",
- "default": false
- },
- "port": {
- "type": "integer",
- "default": 8443
- }
- }
}
},
"required": [],
@@ -1245,11 +1144,6 @@
"type": "object",
"additionalProperties": false,
"properties": {
- "enabled": {
- "type": "boolean",
- "default": true,
- "deprecated": true
- },
"default_theme": {
"type": "string",
"enum": ["light", "dark", "system"],
@@ -1347,17 +1241,6 @@
},
"title": "Webhook"
},
- "cloud": {
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "enabled": {
- "type": "boolean",
- "default": false
- }
- },
- "title": "Cloud"
- },
"kafka": {
"type": "object",
"additionalProperties": false,
@@ -1421,10 +1304,6 @@
"default": "2m"
}
}
- },
- "events": {
- "type": "array",
- "default": ["*:*"]
}
},
"title": "Audit"
@@ -1495,16 +1374,6 @@
"type": "object",
"additionalProperties": false,
"properties": {
- "cloud": {
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "enabled": {
- "type": "boolean",
- "default": false
- }
- }
- }
},
"title": "Experimental"
}
diff --git a/config/migrations/clickhouse/0_initial_clickhouse.up.sql b/config/migrations/clickhouse/0_initial_clickhouse.up.sql
deleted file mode 100644
index decbc90c19..0000000000
--- a/config/migrations/clickhouse/0_initial_clickhouse.up.sql
+++ /dev/null
@@ -1,5 +0,0 @@
-CREATE TABLE IF NOT EXISTS flipt_counter_analytics (
- `timestamp` DateTime('UTC'), `analytic_name` String, `namespace_key` String, `flag_key` String, `flag_type` Enum('VARIANT_FLAG_TYPE' = 1, 'BOOLEAN_FLAG_TYPE' = 2), `reason` Enum('UNKNOWN_EVALUATION_REASON' = 1, 'FLAG_DISABLED_EVALUATION_REASON' = 2, 'MATCH_EVALUATION_REASON' = 3, 'DEFAULT_EVALUATION_REASON' = 4), `match` Nullable(Bool), `evaluation_value` Nullable(String), `value` UInt32
-) Engine = MergeTree
-ORDER BY timestamp
-TTL timestamp + INTERVAL 1 WEEK;
diff --git a/config/migrations/clickhouse/3_alter_add_entity_id.up.sql b/config/migrations/clickhouse/3_alter_add_entity_id.up.sql
deleted file mode 100644
index 65918c5479..0000000000
--- a/config/migrations/clickhouse/3_alter_add_entity_id.up.sql
+++ /dev/null
@@ -1 +0,0 @@
-ALTER TABLE flipt_counter_analytics ADD COLUMN entity_id Nullable(String) AFTER evaluation_value;
diff --git a/config/migrations/cockroachdb/0_initial.up.sql b/config/migrations/cockroachdb/0_initial.up.sql
deleted file mode 100644
index 94ddf804a3..0000000000
--- a/config/migrations/cockroachdb/0_initial.up.sql
+++ /dev/null
@@ -1,58 +0,0 @@
-CREATE TABLE IF NOT EXISTS flags (
- key VARCHAR(255) PRIMARY KEY UNIQUE NOT NULL,
- name VARCHAR(255) NOT NULL,
- description TEXT NOT NULL,
- enabled BOOLEAN DEFAULT FALSE NOT NULL,
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
-);
-
-CREATE TABLE IF NOT EXISTS segments (
- key VARCHAR(255) PRIMARY KEY UNIQUE NOT NULL,
- name VARCHAR(255) NOT NULL,
- description TEXT NOT NULL,
- match_type INTEGER DEFAULT 0 NOT NULL,
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
-);
-
-CREATE TABLE IF NOT EXISTS variants (
- id VARCHAR(255) PRIMARY KEY UNIQUE NOT NULL,
- flag_key VARCHAR(255) NOT NULL REFERENCES flags ON DELETE CASCADE,
- key VARCHAR(255) NOT NULL,
- name VARCHAR(255) NOT NULL,
- description TEXT NOT NULL,
- attachment JSONB,
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- CONSTRAINT variants_flag_key_key UNIQUE(flag_key, key)
-);
-
-CREATE TABLE IF NOT EXISTS constraints (
- id VARCHAR(255) PRIMARY KEY UNIQUE NOT NULL,
- segment_key VARCHAR(255) NOT NULL REFERENCES segments ON DELETE CASCADE,
- type INTEGER DEFAULT 0 NOT NULL,
- property VARCHAR(255) NOT NULL,
- operator VARCHAR(255) NOT NULL,
- value TEXT NOT NULL,
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
-);
-
-CREATE TABLE IF NOT EXISTS rules (
- id VARCHAR(255) PRIMARY KEY UNIQUE NOT NULL,
- flag_key VARCHAR(255) NOT NULL REFERENCES flags ON DELETE CASCADE,
- segment_key VARCHAR(255) NOT NULL REFERENCES segments ON DELETE CASCADE,
- rank INTEGER DEFAULT 1 NOT NULL,
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
-);
-
-CREATE TABLE IF NOT EXISTS distributions (
- id VARCHAR(255) PRIMARY KEY UNIQUE NOT NULL,
- rule_id VARCHAR(255) NOT NULL REFERENCES rules ON DELETE CASCADE,
- variant_id VARCHAR(255) NOT NULL REFERENCES variants ON DELETE CASCADE,
- rollout float DEFAULT 0 NOT NULL,
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
-);
diff --git a/config/migrations/cockroachdb/10_namespaces_add_state_modified_at.up.sql b/config/migrations/cockroachdb/10_namespaces_add_state_modified_at.up.sql
deleted file mode 100644
index 1d8bb69620..0000000000
--- a/config/migrations/cockroachdb/10_namespaces_add_state_modified_at.up.sql
+++ /dev/null
@@ -1 +0,0 @@
-ALTER TABLE namespaces ADD COLUMN state_modified_at TIMESTAMP;
diff --git a/config/migrations/cockroachdb/11_default_variant.up.sql b/config/migrations/cockroachdb/11_default_variant.up.sql
deleted file mode 100644
index 1f6ac9c5b5..0000000000
--- a/config/migrations/cockroachdb/11_default_variant.up.sql
+++ /dev/null
@@ -1 +0,0 @@
-ALTER TABLE flags ADD COLUMN default_variant_id VARCHAR(255) REFERENCES variants(id) ON DELETE SET NULL;
\ No newline at end of file
diff --git a/config/migrations/cockroachdb/12_flag_metadata.up.sql b/config/migrations/cockroachdb/12_flag_metadata.up.sql
deleted file mode 100644
index ac21fc53a7..0000000000
--- a/config/migrations/cockroachdb/12_flag_metadata.up.sql
+++ /dev/null
@@ -1 +0,0 @@
-ALTER TABLE flags ADD COLUMN metadata JSON;
diff --git a/config/migrations/cockroachdb/13_segment_foreign_keys.up.sql b/config/migrations/cockroachdb/13_segment_foreign_keys.up.sql
deleted file mode 100644
index e74901b0fb..0000000000
--- a/config/migrations/cockroachdb/13_segment_foreign_keys.up.sql
+++ /dev/null
@@ -1,10 +0,0 @@
-BEGIN;
-ALTER TABLE rule_segments DROP CONSTRAINT fk_namespace_key_ref_segments;
-COMMIT;
-BEGIN;
-ALTER TABLE rule_segments ADD CONSTRAINT fk_namespace_key_ref_segments FOREIGN KEY (namespace_key, segment_key) REFERENCES segments (namespace_key, key) ON DELETE RESTRICT;
-
-ALTER TABLE rollout_segment_references DROP CONSTRAINT fk_namespace_key_ref_segments;
-COMMIT;
-ALTER TABLE rollout_segment_references
- ADD CONSTRAINT fk_namespace_key_ref_segments FOREIGN KEY (namespace_key, segment_key) REFERENCES segments (namespace_key, key) ON DELETE RESTRICT;
diff --git a/config/migrations/cockroachdb/1_create_table_authentications.up.sql b/config/migrations/cockroachdb/1_create_table_authentications.up.sql
deleted file mode 100644
index b3c4c63fba..0000000000
--- a/config/migrations/cockroachdb/1_create_table_authentications.up.sql
+++ /dev/null
@@ -1,11 +0,0 @@
-CREATE TABLE IF NOT EXISTS authentications (
- id VARCHAR(255) PRIMARY KEY UNIQUE NOT NULL,
- hashed_client_token VARCHAR(255) UNIQUE NOT NULL,
- method INTEGER DEFAULT 0 NOT NULL,
- metadata TEXT,
- expires_at TIMESTAMP,
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
-);
-
-CREATE UNIQUE INDEX hashed_client_token_authentications_index ON authentications (hashed_client_token);
diff --git a/config/migrations/cockroachdb/2_create_table_operation_lock.up.sql b/config/migrations/cockroachdb/2_create_table_operation_lock.up.sql
deleted file mode 100644
index ca9392a10a..0000000000
--- a/config/migrations/cockroachdb/2_create_table_operation_lock.up.sql
+++ /dev/null
@@ -1,6 +0,0 @@
-CREATE TABLE IF NOT EXISTS operation_lock (
- operation VARCHAR(255) PRIMARY KEY UNIQUE NOT NULL,
- version INTEGER DEFAULT 0 NOT NULL,
- last_acquired_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
- acquired_until TIMESTAMP DEFAULT CURRENT_TIMESTAMP
-);
diff --git a/config/migrations/cockroachdb/3_create_namespaces.up.sql b/config/migrations/cockroachdb/3_create_namespaces.up.sql
deleted file mode 100644
index ad983dc8f2..0000000000
--- a/config/migrations/cockroachdb/3_create_namespaces.up.sql
+++ /dev/null
@@ -1,12 +0,0 @@
--- Create namespaces table
-CREATE TABLE IF NOT EXISTS namespaces (
- key VARCHAR(255) PRIMARY KEY UNIQUE NOT NULL,
- name VARCHAR(255) NOT NULL,
- description TEXT NOT NULL,
- protected BOOLEAN DEFAULT FALSE NOT NULL,
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
-);
-
--- Create default namespace
-INSERT INTO namespaces (key, name, description, protected) VALUES ('default', 'Default', 'Default namespace', true);
diff --git a/config/migrations/cockroachdb/4_namespaces_relationships.up.sql b/config/migrations/cockroachdb/4_namespaces_relationships.up.sql
deleted file mode 100644
index 182d476706..0000000000
--- a/config/migrations/cockroachdb/4_namespaces_relationships.up.sql
+++ /dev/null
@@ -1,70 +0,0 @@
--- Flags
-------------------
-BEGIN;
--- Add column namespace_key with a default value
-ALTER TABLE flags ADD COLUMN namespace_key VARCHAR(255) NOT NULL DEFAULT 'default';
-
--- Add foreign key constraint on namespace_key column referencing key column of namespaces table
-ALTER TABLE flags ADD FOREIGN KEY (namespace_key) REFERENCES namespaces(key) ON DELETE CASCADE;
-COMMIT;
--- Drop primary key constraint and add a new composite primary key on namespace_key and key columns
-BEGIN;
-ALTER TABLE flags ALTER PRIMARY KEY USING COLUMNS (namespace_key, key);
-COMMIT;
-DROP INDEX IF EXISTS flags_key_key CASCADE;
-
--- Variants
-------------------
-
--- Add column namespace_key with a default value
-ALTER TABLE variants ADD COLUMN namespace_key VARCHAR(255) NOT NULL DEFAULT 'default';
-
--- Drop previously created unique index
-DROP INDEX IF EXISTS "variants_flag_key_key" CASCADE;
-
--- Add unique index on namespace_key, flag_key and key columns
-ALTER TABLE variants ADD CONSTRAINT "variants_namespace_flag_key" UNIQUE (namespace_key, flag_key, key);
-
--- Add foreign key constraint on namespace_key column referencing key column of namespaces table
-ALTER TABLE variants ADD FOREIGN KEY (namespace_key) REFERENCES namespaces(key) ON DELETE CASCADE;
-
--- Add foreign key constraint on namespace_key and flag_key columns referencing namespace_key and key columns of flags table
-ALTER TABLE variants ADD FOREIGN KEY (namespace_key, flag_key) REFERENCES flags(namespace_key, key) ON DELETE CASCADE;
-
--- Segments
-------------------
-BEGIN;
--- Add column namespace_key with a default value
-ALTER TABLE segments ADD COLUMN namespace_key VARCHAR(255) NOT NULL DEFAULT 'default';
-
--- Add foreign key constraint on namespace_key column referencing key column of namespaces table
-ALTER TABLE segments ADD FOREIGN KEY (namespace_key) REFERENCES namespaces(key) ON DELETE CASCADE;
-COMMIT;
--- Drop primary key constraint and add a new composite primary key on namespace_key and key columns
-BEGIN;
-ALTER TABLE segments ALTER PRIMARY KEY USING COLUMNS (namespace_key, key);
-COMMIT;
-DROP INDEX IF EXISTS segments_key_key CASCADE;
-
--- Constraints
-------------------
-
--- Add column namespace_key with a default value
-ALTER TABLE constraints ADD COLUMN namespace_key VARCHAR(255) NOT NULL DEFAULT 'default';
-
--- Add foreign key constraint on namespace_key column referencing key column of namespaces table
-ALTER TABLE constraints ADD FOREIGN KEY (namespace_key) REFERENCES namespaces(key) ON DELETE CASCADE;
-
--- Add foreign key constraint on namespace_key and segment_key columns referencing namespace_key and key columns of segments table
-ALTER TABLE constraints ADD FOREIGN KEY (namespace_key, segment_key) REFERENCES segments(namespace_key, key) ON DELETE CASCADE;
-
--- Rules
-------------------
-
--- Add column namespace_key with a default value
-ALTER TABLE rules ADD COLUMN namespace_key VARCHAR(255) NOT NULL DEFAULT 'default';
-
--- Add foreign key constraint on namespace_key column referencing key column of namespaces table
-ALTER TABLE rules ADD FOREIGN KEY (namespace_key) REFERENCES namespaces(key) ON DELETE CASCADE;
-ALTER TABLE rules ADD CONSTRAINT fk_namespace_key_ref_segments FOREIGN KEY (namespace_key, flag_key) REFERENCES flags(namespace_key, key) ON DELETE CASCADE;
-ALTER TABLE rules ADD FOREIGN KEY (namespace_key, segment_key) REFERENCES segments(namespace_key, key) ON DELETE CASCADE;
diff --git a/config/migrations/cockroachdb/5_constraints_with_description.up.sql b/config/migrations/cockroachdb/5_constraints_with_description.up.sql
deleted file mode 100644
index 4b77d4c5f9..0000000000
--- a/config/migrations/cockroachdb/5_constraints_with_description.up.sql
+++ /dev/null
@@ -1,2 +0,0 @@
--- Add description column to constraints
-ALTER TABLE constraints ADD COLUMN description TEXT;
\ No newline at end of file
diff --git a/config/migrations/cockroachdb/6_flag_type.up.sql b/config/migrations/cockroachdb/6_flag_type.up.sql
deleted file mode 100644
index f7f32f1fe9..0000000000
--- a/config/migrations/cockroachdb/6_flag_type.up.sql
+++ /dev/null
@@ -1 +0,0 @@
-ALTER TABLE flags ADD COLUMN type INTEGER DEFAULT 0 NOT NULL;
\ No newline at end of file
diff --git a/config/migrations/cockroachdb/7_rollouts.up.sql b/config/migrations/cockroachdb/7_rollouts.up.sql
deleted file mode 100644
index 80f9eee045..0000000000
--- a/config/migrations/cockroachdb/7_rollouts.up.sql
+++ /dev/null
@@ -1,29 +0,0 @@
-CREATE TABLE IF NOT EXISTS rollouts (
- id VARCHAR(255) PRIMARY KEY UNIQUE NOT NULL,
- namespace_key VARCHAR(255) NOT NULL REFERENCES namespaces ON DELETE CASCADE,
- flag_key VARCHAR(255) NOT NULL,
- type INTEGER DEFAULT 0 NOT NULL,
- description TEXT NOT NULL,
- rank INTEGER DEFAULT 1 NOT NULL,
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- FOREIGN KEY (namespace_key, flag_key) REFERENCES flags (namespace_key, key) ON DELETE CASCADE
-);
-
-CREATE TABLE IF NOT EXISTS rollout_thresholds (
- id VARCHAR(255) PRIMARY KEY UNIQUE NOT NULL,
- namespace_key VARCHAR(255) NOT NULL REFERENCES namespaces ON DELETE CASCADE,
- rollout_id VARCHAR(255) UNIQUE NOT NULL REFERENCES rollouts ON DELETE CASCADE,
- percentage float DEFAULT 0 NOT NULL,
- value BOOLEAN DEFAULT FALSE NOT NULL
-);
-
-CREATE TABLE IF NOT EXISTS rollout_segments (
- id VARCHAR(255) PRIMARY KEY UNIQUE NOT NULL,
- namespace_key VARCHAR(255) NOT NULL,
- rollout_id VARCHAR(255) NOT NULL REFERENCES rollouts ON DELETE CASCADE,
- segment_key VARCHAR(255) NOT NULL,
- value BOOLEAN DEFAULT FALSE NOT NULL,
- CONSTRAINT fk_namespace_key_ref_namespaces FOREIGN KEY (namespace_key) REFERENCES namespaces ON DELETE CASCADE,
- CONSTRAINT fk_namespace_key_ref_segments FOREIGN KEY (namespace_key, segment_key) REFERENCES segments (namespace_key, key) ON DELETE CASCADE
-);
diff --git a/config/migrations/cockroachdb/8_segment_anding_tables.up.sql b/config/migrations/cockroachdb/8_segment_anding_tables.up.sql
deleted file mode 100644
index 42c1e82b96..0000000000
--- a/config/migrations/cockroachdb/8_segment_anding_tables.up.sql
+++ /dev/null
@@ -1,25 +0,0 @@
-BEGIN;
--- Rules
-CREATE TABLE IF NOT EXISTS rule_segments (
- rule_id VARCHAR(255) NOT NULL REFERENCES rules ON DELETE CASCADE,
- namespace_key VARCHAR(255) NOT NULL,
- segment_key VARCHAR(255) NOT NULL,
- UNIQUE (rule_id, namespace_key, segment_key),
- CONSTRAINT fk_namespace_key_ref_segments FOREIGN KEY (namespace_key, segment_key) REFERENCES segments (namespace_key, key) ON DELETE CASCADE
-);
-COMMIT;
-
-INSERT INTO rule_segments (rule_id, namespace_key, segment_key) SELECT id AS rule_id, namespace_key, segment_key FROM rules;
-
-BEGIN;
--- Rollouts
-CREATE TABLE IF NOT EXISTS rollout_segment_references (
- rollout_segment_id VARCHAR(255) NOT NULL REFERENCES rollout_segments ON DELETE CASCADE,
- namespace_key VARCHAR(255) NOT NULL,
- segment_key VARCHAR(255) NOT NULL,
- UNIQUE (rollout_segment_id, namespace_key, segment_key),
- CONSTRAINT fk_namespace_key_ref_segments FOREIGN KEY (namespace_key, segment_key) REFERENCES segments (namespace_key, key) ON DELETE CASCADE
-);
-COMMIT;
-
-INSERT INTO rollout_segment_references (rollout_segment_id, namespace_key, segment_key) SELECT id AS rollout_segment_id, namespace_key, segment_key FROM rollout_segments;
diff --git a/config/migrations/cockroachdb/9_alter_rules_rollouts_segments.up.sql b/config/migrations/cockroachdb/9_alter_rules_rollouts_segments.up.sql
deleted file mode 100644
index 05b7a71d71..0000000000
--- a/config/migrations/cockroachdb/9_alter_rules_rollouts_segments.up.sql
+++ /dev/null
@@ -1,19 +0,0 @@
--- Rules
-BEGIN;
-ALTER TABLE IF EXISTS rules DROP CONSTRAINT fk_namespace_key_ref_segments;
-
-ALTER TABLE IF EXISTS rules DROP COLUMN segment_key;
-COMMIT;
-
-ALTER TABLE IF EXISTS rules ADD COLUMN segment_operator INTEGER NOT NULL DEFAULT 0;
-
--- Rollouts
-BEGIN;
-ALTER TABLE IF EXISTS rollout_segments DROP CONSTRAINT fk_namespace_key_ref_segments;
-ALTER TABLE IF EXISTS rollout_segments DROP CONSTRAINT fk_namespace_key_ref_namespaces;
-
-ALTER TABLE IF EXISTS rollout_segments DROP COLUMN segment_key;
-ALTER TABLE IF EXISTS rollout_segments DROP COLUMN namespace_key;
-COMMIT;
-
-ALTER TABLE IF EXISTS rollout_segments ADD COLUMN segment_operator INTEGER NOT NULL DEFAULT 0;
diff --git a/config/migrations/mysql/0_initial.up.sql b/config/migrations/mysql/0_initial.up.sql
deleted file mode 100644
index 380558b740..0000000000
--- a/config/migrations/mysql/0_initial.up.sql
+++ /dev/null
@@ -1,69 +0,0 @@
-CREATE TABLE IF NOT EXISTS flags (
- `key` VARCHAR(255) UNIQUE NOT NULL,
- name VARCHAR(255) NOT NULL,
- description TEXT NOT NULL,
- enabled BOOLEAN DEFAULT FALSE NOT NULL,
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP NOT NULL,
- PRIMARY KEY (`key`)
-);
-
-CREATE TABLE IF NOT EXISTS segments (
- `key` VARCHAR(255) UNIQUE NOT NULL,
- name VARCHAR(255) NOT NULL,
- description TEXT NOT NULL,
- match_type INTEGER DEFAULT 0 NOT NULL,
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP NOT NULL,
- PRIMARY KEY (`key`)
-);
-
-CREATE TABLE IF NOT EXISTS variants (
- id VARCHAR(255) UNIQUE NOT NULL,
- flag_key VARCHAR(255) NOT NULL,
- `key` VARCHAR(255) NOT NULL,
- name VARCHAR(255) NOT NULL,
- description TEXT NOT NULL,
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP NOT NULL,
- PRIMARY KEY (id),
- FOREIGN KEY (flag_key) REFERENCES flags (`key`) ON DELETE CASCADE,
- CONSTRAINT variants_flag_key_key UNIQUE (flag_key, `key`)
-);
-
-CREATE TABLE IF NOT EXISTS constraints (
- id VARCHAR(255) UNIQUE NOT NULL,
- segment_key VARCHAR(255) NOT NULL,
- type INTEGER DEFAULT 0 NOT NULL,
- property VARCHAR(255) NOT NULL,
- operator VARCHAR(255) NOT NULL,
- value TEXT NOT NULL,
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP NOT NULL,
- PRIMARY KEY (id),
- FOREIGN KEY (segment_key) REFERENCES segments (`key`) ON DELETE CASCADE
-);
-
-CREATE TABLE IF NOT EXISTS rules (
- id VARCHAR(255) UNIQUE NOT NULL,
- flag_key VARCHAR(255) NOT NULL,
- segment_key VARCHAR(255) NOT NULL,
- `rank` INTEGER DEFAULT 1 NOT NULL,
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP NOT NULL,
- PRIMARY KEY (id),
- FOREIGN KEY (flag_key) REFERENCES flags (`key`) ON DELETE CASCADE,
- FOREIGN KEY (segment_key) REFERENCES segments (`key`) ON DELETE CASCADE
-);
-
-CREATE TABLE IF NOT EXISTS distributions (
- id VARCHAR(255) UNIQUE NOT NULL,
- rule_id VARCHAR(255) NOT NULL,
- variant_id VARCHAR(255) NOT NULL,
- rollout float DEFAULT 0 NOT NULL,
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP NOT NULL,
- PRIMARY KEY (id),
- FOREIGN KEY (rule_id) REFERENCES rules (id) ON DELETE CASCADE,
- FOREIGN KEY (variant_id) REFERENCES variants (id) ON DELETE CASCADE
-);
diff --git a/config/migrations/mysql/10_alter_rules_rollout_segments.up.sql b/config/migrations/mysql/10_alter_rules_rollout_segments.up.sql
deleted file mode 100644
index 55f30f3c64..0000000000
--- a/config/migrations/mysql/10_alter_rules_rollout_segments.up.sql
+++ /dev/null
@@ -1,15 +0,0 @@
--- Rules
-ALTER TABLE rules DROP FOREIGN KEY `rules_ibfk_3`;
-
-ALTER TABLE rules DROP COLUMN segment_key;
-
-ALTER TABLE rules ADD COLUMN segment_operator INTEGER NOT NULL DEFAULT 0;
-
--- Rollouts
-ALTER TABLE rollout_segments DROP FOREIGN KEY `rollout_segments_ibfk_1`;
-ALTER TABLE rollout_segments DROP FOREIGN KEY `rollout_segments_ibfk_3`;
-
-ALTER TABLE rollout_segments DROP COLUMN segment_key;
-ALTER TABLE rollout_segments DROP COLUMN namespace_key;
-
-ALTER TABLE rollout_segments ADD COLUMN segment_operator INTEGER NOT NULL DEFAULT 0;
\ No newline at end of file
diff --git a/config/migrations/mysql/11_change_timestamp_precision.up.sql b/config/migrations/mysql/11_change_timestamp_precision.up.sql
deleted file mode 100644
index 5bab0d352f..0000000000
--- a/config/migrations/mysql/11_change_timestamp_precision.up.sql
+++ /dev/null
@@ -1,17 +0,0 @@
-ALTER TABLE flags MODIFY created_at TIMESTAMP(6) DEFAULT CURRENT_TIMESTAMP(6) NOT NULL;
-ALTER TABLE flags MODIFY updated_at TIMESTAMP(6) DEFAULT CURRENT_TIMESTAMP(6) NOT NULL;
-
-ALTER TABLE segments MODIFY created_at TIMESTAMP(6) DEFAULT CURRENT_TIMESTAMP(6) NOT NULL;
-ALTER TABLE segments MODIFY updated_at TIMESTAMP(6) DEFAULT CURRENT_TIMESTAMP(6) NOT NULL;
-
-ALTER TABLE variants MODIFY created_at TIMESTAMP(6) DEFAULT CURRENT_TIMESTAMP(6) NOT NULL;
-ALTER TABLE variants MODIFY updated_at TIMESTAMP(6) DEFAULT CURRENT_TIMESTAMP(6) NOT NULL;
-
-ALTER TABLE constraints MODIFY created_at TIMESTAMP(6) DEFAULT CURRENT_TIMESTAMP(6) NOT NULL;
-ALTER TABLE constraints MODIFY updated_at TIMESTAMP(6) DEFAULT CURRENT_TIMESTAMP(6) NOT NULL;
-
-ALTER TABLE rules MODIFY created_at TIMESTAMP(6) DEFAULT CURRENT_TIMESTAMP(6) NOT NULL;
-ALTER TABLE rules MODIFY updated_at TIMESTAMP(6) DEFAULT CURRENT_TIMESTAMP(6) NOT NULL;
-
-ALTER TABLE distributions MODIFY created_at TIMESTAMP(6) DEFAULT CURRENT_TIMESTAMP(6) NOT NULL;
-ALTER TABLE distributions MODIFY updated_at TIMESTAMP(6) DEFAULT CURRENT_TIMESTAMP(6) NOT NULL;
diff --git a/config/migrations/mysql/12_namespaces_add_state_modified_at.up.sql b/config/migrations/mysql/12_namespaces_add_state_modified_at.up.sql
deleted file mode 100644
index f69d72252a..0000000000
--- a/config/migrations/mysql/12_namespaces_add_state_modified_at.up.sql
+++ /dev/null
@@ -1 +0,0 @@
-ALTER TABLE namespaces ADD COLUMN state_modified_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP;
diff --git a/config/migrations/mysql/13_default_variant.up.sql b/config/migrations/mysql/13_default_variant.up.sql
deleted file mode 100644
index a99b270954..0000000000
--- a/config/migrations/mysql/13_default_variant.up.sql
+++ /dev/null
@@ -1 +0,0 @@
-ALTER TABLE `flags` ADD COLUMN `default_variant_id` VARCHAR(255) REFERENCES variants(`id`) ON DELETE SET NULL;
\ No newline at end of file
diff --git a/config/migrations/mysql/14_flag_metadata.up.sql b/config/migrations/mysql/14_flag_metadata.up.sql
deleted file mode 100644
index 19661dc375..0000000000
--- a/config/migrations/mysql/14_flag_metadata.up.sql
+++ /dev/null
@@ -1 +0,0 @@
-ALTER TABLE flags ADD COLUMN metadata JSON AFTER enabled;
diff --git a/config/migrations/mysql/15_segment_foreign_keys.up.sql b/config/migrations/mysql/15_segment_foreign_keys.up.sql
deleted file mode 100644
index 045c645c8f..0000000000
--- a/config/migrations/mysql/15_segment_foreign_keys.up.sql
+++ /dev/null
@@ -1,6 +0,0 @@
-ALTER TABLE rule_segments DROP FOREIGN KEY `rule_segments_ibfk_2`;
-ALTER TABLE rule_segments ADD CONSTRAINT `rule_segments_ibfk_2` FOREIGN KEY (namespace_key, segment_key) REFERENCES segments (namespace_key, `key`) ON DELETE RESTRICT;
-
-ALTER TABLE rollout_segment_references DROP FOREIGN KEY `rollout_segment_references_ibfk_2`;
-ALTER TABLE rollout_segment_references
- ADD CONSTRAINT `rollout_segment_references_ibfk_2` FOREIGN KEY (namespace_key, segment_key) REFERENCES segments (namespace_key, `key`) ON DELETE RESTRICT;
diff --git a/config/migrations/mysql/1_variants_attachment.up.sql b/config/migrations/mysql/1_variants_attachment.up.sql
deleted file mode 100644
index 0dfdb9cb26..0000000000
--- a/config/migrations/mysql/1_variants_attachment.up.sql
+++ /dev/null
@@ -1 +0,0 @@
-ALTER TABLE variants ADD COLUMN attachment JSON AFTER description;
diff --git a/config/migrations/mysql/2_create_table_authentications.up.sql b/config/migrations/mysql/2_create_table_authentications.up.sql
deleted file mode 100644
index a1b3d50b34..0000000000
--- a/config/migrations/mysql/2_create_table_authentications.up.sql
+++ /dev/null
@@ -1,12 +0,0 @@
-CREATE TABLE IF NOT EXISTS authentications (
- id VARCHAR(255) UNIQUE NOT NULL,
- hashed_client_token VARCHAR(255) UNIQUE NOT NULL,
- method INTEGER DEFAULT 0 NOT NULL,
- metadata TEXT,
- expires_at TIMESTAMP(6),
- created_at TIMESTAMP(6) DEFAULT CURRENT_TIMESTAMP(6) NOT NULL,
- updated_at TIMESTAMP(6) DEFAULT CURRENT_TIMESTAMP(6) NOT NULL,
- PRIMARY KEY (`id`)
-);
-
-CREATE UNIQUE INDEX hashed_client_token_authentications_index ON authentications (hashed_client_token);
diff --git a/config/migrations/mysql/3_create_table_operation_lock.up.sql b/config/migrations/mysql/3_create_table_operation_lock.up.sql
deleted file mode 100644
index 372fb6116f..0000000000
--- a/config/migrations/mysql/3_create_table_operation_lock.up.sql
+++ /dev/null
@@ -1,7 +0,0 @@
-CREATE TABLE IF NOT EXISTS operation_lock (
- operation VARCHAR(255) UNIQUE NOT NULL,
- version INTEGER DEFAULT 0 NOT NULL,
- last_acquired_at TIMESTAMP(6) DEFAULT CURRENT_TIMESTAMP(6),
- acquired_until TIMESTAMP(6) DEFAULT CURRENT_TIMESTAMP(6),
- PRIMARY KEY (`operation`)
-);
diff --git a/config/migrations/mysql/4_create_namespaces.up.sql b/config/migrations/mysql/4_create_namespaces.up.sql
deleted file mode 100644
index ac2455e3a1..0000000000
--- a/config/migrations/mysql/4_create_namespaces.up.sql
+++ /dev/null
@@ -1,12 +0,0 @@
--- Create namespaces table
-CREATE TABLE IF NOT EXISTS namespaces (
- `key` VARCHAR(255) PRIMARY KEY UNIQUE NOT NULL,
- name VARCHAR(255) NOT NULL,
- description TEXT NOT NULL,
- protected BOOLEAN DEFAULT FALSE NOT NULL,
- created_at TIMESTAMP(6) DEFAULT CURRENT_TIMESTAMP(6) NOT NULL,
- updated_at TIMESTAMP(6) DEFAULT CURRENT_TIMESTAMP(6) NOT NULL
-);
-
--- Create default namespace
-INSERT INTO namespaces (`key`, name, description, protected) VALUES ('default', 'Default', 'Default namespace', true);
\ No newline at end of file
diff --git a/config/migrations/mysql/5_namespaces_relationships.up.sql b/config/migrations/mysql/5_namespaces_relationships.up.sql
deleted file mode 100644
index 24c64d8d1c..0000000000
--- a/config/migrations/mysql/5_namespaces_relationships.up.sql
+++ /dev/null
@@ -1,81 +0,0 @@
--- Drop previously created foreign key
-ALTER TABLE constraints DROP FOREIGN KEY `constraints_ibfk_1`;
-ALTER TABLE rules DROP FOREIGN KEY `rules_ibfk_1`;
-ALTER TABLE rules DROP FOREIGN KEY `rules_ibfk_2`;
-ALTER TABLE variants DROP FOREIGN KEY `variants_ibfk_1`;
-
--- Flags
-
--- Add column namespace_key with a default value
-ALTER TABLE flags ADD COLUMN namespace_key VARCHAR(255) NOT NULL DEFAULT 'default';
-
--- Drop previously created unique index
-ALTER TABLE flags DROP INDEX `key`, ADD INDEX `key` (`key`) USING BTREE;
-
--- Drop primary key constraint and add a new composite primary key on namespace_key and key columns
-ALTER TABLE flags DROP PRIMARY KEY, ADD PRIMARY KEY (`namespace_key`, `key`);
-
--- Add foreign key constraint on namespace_key column referencing key column of namespaces table
-ALTER TABLE flags ADD FOREIGN KEY (namespace_key) REFERENCES namespaces(`key`) ON DELETE CASCADE;
-
--- Variants
-
--- Add column namespace_key with a default value
-ALTER TABLE variants ADD COLUMN namespace_key VARCHAR(255) NOT NULL DEFAULT 'default';
-
--- Drop previously created foreign key
-
--- Drop previously created unique index and add a new unique index on namespace_key, flag_key and key columns
-ALTER TABLE variants DROP INDEX `variants_flag_key_key`, ADD UNIQUE INDEX `variants_namespace_flag_key` (`namespace_key`, `flag_key`, `key`) USING BTREE;
-
--- Add foreign key constraint on namespace_key column referencing key column of namespaces table
-ALTER TABLE variants ADD FOREIGN KEY (namespace_key) REFERENCES namespaces(`key`) ON DELETE CASCADE;
-
--- Add foreign key constraint on namespace_key and flag_key columns referencing namespace_key and key columns of flags table
-ALTER TABLE variants ADD FOREIGN KEY (namespace_key, flag_key) REFERENCES flags(`namespace_key`, `key`) ON DELETE CASCADE;
-
--- Segments
-
--- Drop previously created unique index and add a new unique index on namespace_key and key columns
-ALTER TABLE segments DROP INDEX `key`, ADD INDEX `key` (`key`) USING BTREE;
-
--- Add column namespace_key with a default value
-ALTER TABLE segments ADD COLUMN namespace_key VARCHAR(255) NOT NULL DEFAULT 'default';
-
--- Drop primary key constraint and add a new composite primary key on namespace_key and key columns
-ALTER TABLE segments DROP PRIMARY KEY, ADD PRIMARY KEY (`namespace_key`, `key`);
-
--- Add foreign key constraint on namespace_key column referencing key column of namespaces table
-ALTER TABLE segments ADD FOREIGN KEY (namespace_key) REFERENCES namespaces(`key`) ON DELETE CASCADE;
-
--- Constraints
-
--- Add column namespace_key with a default value
-ALTER TABLE constraints ADD COLUMN namespace_key VARCHAR(255) NOT NULL DEFAULT 'default';
-
--- Drop previously created index and add a new index on namespace_key and segment_key columns
-ALTER TABLE constraints DROP INDEX `segment_key`, ADD INDEX `constraints_namespace_segment_key` (`namespace_key`, `segment_key`) USING BTREE;
-
--- Add foreign key constraint on namespace_key column referencing key column of namespaces table
-ALTER TABLE constraints ADD FOREIGN KEY (namespace_key) REFERENCES namespaces(`key`) ON DELETE CASCADE;
-
--- Add foreign key constraint on namespace_key and segment_key columns referencing namespace_key and key columns of segments table
-ALTER TABLE constraints ADD FOREIGN KEY (namespace_key, segment_key) REFERENCES segments(`namespace_key`, `key`) ON DELETE CASCADE;
-
--- Rules
-
--- Add column namespace_key with a default value
-ALTER TABLE rules ADD COLUMN namespace_key VARCHAR(255) NOT NULL DEFAULT 'default';
-
--- Drop previously created index and add a new index on namespace_key, flag_key and segment_key columns
-ALTER TABLE rules DROP INDEX `flag_key`, ADD INDEX `rules_namespace_flag_key` (`namespace_key`, `flag_key`) USING BTREE;
-ALTER TABLE rules DROP INDEX `segment_key`, ADD INDEX `rules_namespace_segment_key` (`namespace_key`, `segment_key`) USING BTREE;
-
--- Add foreign key constraint on namespace_key column referencing key column of namespaces table
-ALTER TABLE rules ADD FOREIGN KEY (namespace_key) REFERENCES namespaces(`key`) ON DELETE CASCADE;
-
--- Add foreign key constraint on namespace_key and flag_key columns referencing namespace_key and key columns of flags table
-ALTER TABLE rules ADD FOREIGN KEY (namespace_key, flag_key) REFERENCES flags(`namespace_key`, `key`) ON DELETE CASCADE;
-
--- Add foreign key constraint on namespace_key and segment_key columns referencing namespace_key and key columns of segments table
-ALTER TABLE rules ADD FOREIGN KEY (namespace_key, segment_key) REFERENCES segments(`namespace_key`, `key`) ON DELETE CASCADE;
diff --git a/config/migrations/mysql/6_constraints_with_description.up.sql b/config/migrations/mysql/6_constraints_with_description.up.sql
deleted file mode 100644
index 4b77d4c5f9..0000000000
--- a/config/migrations/mysql/6_constraints_with_description.up.sql
+++ /dev/null
@@ -1,2 +0,0 @@
--- Add description column to constraints
-ALTER TABLE constraints ADD COLUMN description TEXT;
\ No newline at end of file
diff --git a/config/migrations/mysql/7_flag_type.up.sql b/config/migrations/mysql/7_flag_type.up.sql
deleted file mode 100644
index 969aa28f7b..0000000000
--- a/config/migrations/mysql/7_flag_type.up.sql
+++ /dev/null
@@ -1 +0,0 @@
-ALTER TABLE flags ADD COLUMN `type` INTEGER DEFAULT 0 NOT NULL;
\ No newline at end of file
diff --git a/config/migrations/mysql/8_rollouts.up.sql b/config/migrations/mysql/8_rollouts.up.sql
deleted file mode 100644
index 1a3cf20286..0000000000
--- a/config/migrations/mysql/8_rollouts.up.sql
+++ /dev/null
@@ -1,36 +0,0 @@
-CREATE TABLE IF NOT EXISTS rollouts (
- id VARCHAR(255) UNIQUE NOT NULL,
- namespace_key VARCHAR(255) NOT NULL,
- flag_key VARCHAR(255) NOT NULL,
- type INTEGER DEFAULT 0 NOT NULL,
- description TEXT NOT NULL,
- `rank` INTEGER DEFAULT 1 NOT NULL,
- created_at TIMESTAMP(6) DEFAULT CURRENT_TIMESTAMP(6) NOT NULL,
- updated_at TIMESTAMP(6) DEFAULT CURRENT_TIMESTAMP(6) NOT NULL,
- PRIMARY KEY (id),
- FOREIGN KEY (namespace_key) REFERENCES namespaces (`key`) ON DELETE CASCADE,
- FOREIGN KEY (namespace_key, flag_key) REFERENCES flags (namespace_key, `key`) ON DELETE CASCADE
-);
-
-CREATE TABLE IF NOT EXISTS rollout_thresholds (
- id VARCHAR(255) UNIQUE NOT NULL,
- namespace_key VARCHAR(255) NOT NULL,
- rollout_id VARCHAR(255) UNIQUE NOT NULL,
- percentage float DEFAULT 0 NOT NULL,
- value BOOLEAN DEFAULT FALSE NOT NULL,
- PRIMARY KEY (id),
- FOREIGN KEY (namespace_key) REFERENCES namespaces (`key`) ON DELETE CASCADE,
- FOREIGN KEY (rollout_id) REFERENCES rollouts (id) ON DELETE CASCADE
-);
-
-CREATE TABLE IF NOT EXISTS rollout_segments (
- id VARCHAR(255) UNIQUE NOT NULL,
- namespace_key VARCHAR(255) NOT NULL,
- rollout_id VARCHAR(255) NOT NULL,
- segment_key VARCHAR(255) NOT NULL,
- value BOOLEAN DEFAULT FALSE NOT NULL,
- PRIMARY KEY (id),
- FOREIGN KEY (namespace_key) REFERENCES namespaces (`key`) ON DELETE CASCADE,
- FOREIGN KEY (rollout_id) REFERENCES rollouts (id) ON DELETE CASCADE,
- FOREIGN KEY (namespace_key, segment_key) REFERENCES segments (namespace_key, `key`) ON DELETE CASCADE
-);
diff --git a/config/migrations/mysql/9_segment_anding_tables.up.sql b/config/migrations/mysql/9_segment_anding_tables.up.sql
deleted file mode 100644
index 5eb77470d5..0000000000
--- a/config/migrations/mysql/9_segment_anding_tables.up.sql
+++ /dev/null
@@ -1,23 +0,0 @@
--- Rules
-CREATE TABLE IF NOT EXISTS rule_segments (
- rule_id VARCHAR(255) NOT NULL,
- namespace_key VARCHAR(255) NOT NULL,
- segment_key VARCHAR(255) NOT NULL,
- CONSTRAINT rule_id_namespace_segment UNIQUE (rule_id, namespace_key, segment_key),
- FOREIGN KEY (rule_id) REFERENCES rules (id) ON DELETE CASCADE,
- FOREIGN KEY (namespace_key, segment_key) REFERENCES segments (namespace_key, `key`) ON DELETE CASCADE
-);
-
-INSERT INTO rule_segments (rule_id, namespace_key, segment_key) SELECT id AS rule_id, namespace_key, segment_key FROM rules;
-
--- Rollouts
-CREATE TABLE IF NOT EXISTS rollout_segment_references (
- rollout_segment_id VARCHAR(255) NOT NULL,
- namespace_key VARCHAR(255) NOT NULL,
- segment_key VARCHAR(255) NOT NULL,
- CONSTRAINT rollout_segment_id_namespace_segment UNIQUE (rollout_segment_id, namespace_key, segment_key),
- FOREIGN KEY (rollout_segment_id) REFERENCES rollout_segments (id) ON DELETE CASCADE,
- FOREIGN KEY (namespace_key, segment_key) REFERENCES segments (namespace_key, `key`) ON DELETE CASCADE
-);
-
-INSERT INTO rollout_segment_references (rollout_segment_id, namespace_key, segment_key) SELECT id AS rollout_segment_id, namespace_key, segment_key FROM rollout_segments;
\ No newline at end of file
diff --git a/config/migrations/postgres/0_initial.up.sql b/config/migrations/postgres/0_initial.up.sql
deleted file mode 100644
index aed59f4f8a..0000000000
--- a/config/migrations/postgres/0_initial.up.sql
+++ /dev/null
@@ -1,55 +0,0 @@
-CREATE TABLE IF NOT EXISTS flags (
- key VARCHAR(255) PRIMARY KEY UNIQUE NOT NULL,
- name VARCHAR(255) NOT NULL,
- description TEXT NOT NULL,
- enabled BOOLEAN DEFAULT FALSE NOT NULL,
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
-);
-
-CREATE TABLE IF NOT EXISTS segments (
- key VARCHAR(255) PRIMARY KEY UNIQUE NOT NULL,
- name VARCHAR(255) NOT NULL,
- description TEXT NOT NULL,
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
-);
-
-CREATE TABLE IF NOT EXISTS variants (
- id VARCHAR(255) PRIMARY KEY UNIQUE NOT NULL,
- flag_key VARCHAR(255) NOT NULL REFERENCES flags ON DELETE CASCADE,
- key VARCHAR(255) UNIQUE NOT NULL,
- name VARCHAR(255) NOT NULL,
- description TEXT NOT NULL,
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
-);
-
-CREATE TABLE IF NOT EXISTS constraints (
- id VARCHAR(255) PRIMARY KEY UNIQUE NOT NULL,
- segment_key VARCHAR(255) NOT NULL REFERENCES segments ON DELETE CASCADE,
- type INTEGER DEFAULT 0 NOT NULL,
- property VARCHAR(255) NOT NULL,
- operator VARCHAR(255) NOT NULL,
- value TEXT NOT NULL,
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
-);
-
-CREATE TABLE IF NOT EXISTS rules (
- id VARCHAR(255) PRIMARY KEY UNIQUE NOT NULL,
- flag_key VARCHAR(255) NOT NULL REFERENCES flags ON DELETE CASCADE,
- segment_key VARCHAR(255) NOT NULL REFERENCES segments ON DELETE CASCADE,
- rank INTEGER DEFAULT 1 NOT NULL,
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
-);
-
-CREATE TABLE IF NOT EXISTS distributions (
- id VARCHAR(255) PRIMARY KEY UNIQUE NOT NULL,
- rule_id VARCHAR(255) NOT NULL REFERENCES rules ON DELETE CASCADE,
- variant_id VARCHAR(255) NOT NULL REFERENCES variants ON DELETE CASCADE,
- rollout float DEFAULT 0 NOT NULL,
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
-);
diff --git a/config/migrations/postgres/10_rollouts.up.sql b/config/migrations/postgres/10_rollouts.up.sql
deleted file mode 100644
index b09ac83213..0000000000
--- a/config/migrations/postgres/10_rollouts.up.sql
+++ /dev/null
@@ -1,28 +0,0 @@
-CREATE TABLE IF NOT EXISTS rollouts (
- id VARCHAR(255) PRIMARY KEY UNIQUE NOT NULL,
- namespace_key VARCHAR(255) NOT NULL REFERENCES namespaces ON DELETE CASCADE,
- flag_key VARCHAR(255) NOT NULL,
- type INTEGER DEFAULT 0 NOT NULL,
- description TEXT NOT NULL,
- rank INTEGER DEFAULT 1 NOT NULL,
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- FOREIGN KEY (namespace_key, flag_key) REFERENCES flags (namespace_key, key) ON DELETE CASCADE
-);
-
-CREATE TABLE IF NOT EXISTS rollout_thresholds (
- id VARCHAR(255) PRIMARY KEY UNIQUE NOT NULL,
- namespace_key VARCHAR(255) NOT NULL REFERENCES namespaces ON DELETE CASCADE,
- rollout_id VARCHAR(255) UNIQUE NOT NULL REFERENCES rollouts ON DELETE CASCADE,
- percentage float DEFAULT 0 NOT NULL,
- value BOOLEAN DEFAULT FALSE NOT NULL
-);
-
-CREATE TABLE IF NOT EXISTS rollout_segments (
- id VARCHAR(255) PRIMARY KEY UNIQUE NOT NULL,
- namespace_key VARCHAR(255) NOT NULL REFERENCES namespaces ON DELETE CASCADE,
- rollout_id VARCHAR(255) NOT NULL REFERENCES rollouts ON DELETE CASCADE,
- segment_key VARCHAR(255) NOT NULL,
- value BOOLEAN DEFAULT FALSE NOT NULL,
- FOREIGN KEY (namespace_key, segment_key) REFERENCES segments (namespace_key, key) ON DELETE CASCADE
-);
diff --git a/config/migrations/postgres/11_segment_anding_tables.up.sql b/config/migrations/postgres/11_segment_anding_tables.up.sql
deleted file mode 100644
index f974fb7fb7..0000000000
--- a/config/migrations/postgres/11_segment_anding_tables.up.sql
+++ /dev/null
@@ -1,21 +0,0 @@
--- Rules
-CREATE TABLE IF NOT EXISTS rule_segments (
- rule_id VARCHAR(255) NOT NULL REFERENCES rules ON DELETE CASCADE,
- namespace_key VARCHAR(255) NOT NULL,
- segment_key VARCHAR(255) NOT NULL,
- UNIQUE (rule_id, namespace_key, segment_key),
- FOREIGN KEY (namespace_key, segment_key) REFERENCES segments (namespace_key, key) ON DELETE CASCADE
-);
-
-INSERT INTO rule_segments (rule_id, namespace_key, segment_key) SELECT id AS rule_id, namespace_key, segment_key FROM rules;
-
--- Rollouts
-CREATE TABLE IF NOT EXISTS rollout_segment_references (
- rollout_segment_id VARCHAR(255) NOT NULL REFERENCES rollout_segments ON DELETE CASCADE,
- namespace_key VARCHAR(255) NOT NULL,
- segment_key VARCHAR(255) NOT NULL,
- UNIQUE (rollout_segment_id, namespace_key, segment_key),
- FOREIGN KEY (namespace_key, segment_key) REFERENCES segments (namespace_key, key) ON DELETE CASCADE
-);
-
-INSERT INTO rollout_segment_references (rollout_segment_id, namespace_key, segment_key) SELECT id AS rollout_segment_id, namespace_key, segment_key FROM rollout_segments;
\ No newline at end of file
diff --git a/config/migrations/postgres/12_alter_rules_rollout_segments.up.sql b/config/migrations/postgres/12_alter_rules_rollout_segments.up.sql
deleted file mode 100644
index 2b107c9a15..0000000000
--- a/config/migrations/postgres/12_alter_rules_rollout_segments.up.sql
+++ /dev/null
@@ -1,15 +0,0 @@
--- Rules
-ALTER TABLE rules DROP CONSTRAINT IF EXISTS rules_namespace_key_segment_key_fkey CASCADE;
-
-ALTER TABLE rules DROP COLUMN segment_key;
-
-ALTER TABLE rules ADD COLUMN segment_operator INTEGER NOT NULL DEFAULT 0;
-
--- Rollouts
-ALTER TABLE rollout_segments DROP CONSTRAINT IF EXISTS rollout_segments_namespace_key_fkey CASCADE;
-ALTER TABLE rollout_segments DROP CONSTRAINT IF EXISTS rollout_segments_namespace_key_segment_key_fkey CASCADE;
-
-ALTER TABLE rollout_segments DROP COLUMN segment_key;
-ALTER TABLE rollout_segments DROP COLUMN namespace_key;
-
-ALTER TABLE rollout_segments ADD COLUMN segment_operator INTEGER NOT NULL DEFAULT 0;
\ No newline at end of file
diff --git a/config/migrations/postgres/13_namespaces_add_state_modified_at.up.sql b/config/migrations/postgres/13_namespaces_add_state_modified_at.up.sql
deleted file mode 100644
index 1d8bb69620..0000000000
--- a/config/migrations/postgres/13_namespaces_add_state_modified_at.up.sql
+++ /dev/null
@@ -1 +0,0 @@
-ALTER TABLE namespaces ADD COLUMN state_modified_at TIMESTAMP;
diff --git a/config/migrations/postgres/14_default_variant.up.sql b/config/migrations/postgres/14_default_variant.up.sql
deleted file mode 100644
index 1f6ac9c5b5..0000000000
--- a/config/migrations/postgres/14_default_variant.up.sql
+++ /dev/null
@@ -1 +0,0 @@
-ALTER TABLE flags ADD COLUMN default_variant_id VARCHAR(255) REFERENCES variants(id) ON DELETE SET NULL;
\ No newline at end of file
diff --git a/config/migrations/postgres/15_flag_metadata.up.sql b/config/migrations/postgres/15_flag_metadata.up.sql
deleted file mode 100644
index ac21fc53a7..0000000000
--- a/config/migrations/postgres/15_flag_metadata.up.sql
+++ /dev/null
@@ -1 +0,0 @@
-ALTER TABLE flags ADD COLUMN metadata JSON;
diff --git a/config/migrations/postgres/16_segment_foreign_keys.up.sql b/config/migrations/postgres/16_segment_foreign_keys.up.sql
deleted file mode 100644
index 64589f3c21..0000000000
--- a/config/migrations/postgres/16_segment_foreign_keys.up.sql
+++ /dev/null
@@ -1,6 +0,0 @@
-ALTER TABLE rule_segments DROP CONSTRAINT rule_segments_namespace_key_segment_key_fkey;
-ALTER TABLE rule_segments ADD CONSTRAINT rule_segments_namespace_key_segment_key_fkey FOREIGN KEY (namespace_key, segment_key) REFERENCES segments (namespace_key, key) ON DELETE RESTRICT;
-
-ALTER TABLE rollout_segment_references DROP CONSTRAINT rollout_segment_references_namespace_key_segment_key_fkey;
-ALTER TABLE rollout_segment_references
- ADD CONSTRAINT rollout_segment_references_namespace_key_segment_key_fkey FOREIGN KEY (namespace_key, segment_key) REFERENCES segments (namespace_key, key) ON DELETE RESTRICT;
diff --git a/config/migrations/postgres/1_variants_unique_per_flag.up.sql b/config/migrations/postgres/1_variants_unique_per_flag.up.sql
deleted file mode 100644
index b3e8b7ac07..0000000000
--- a/config/migrations/postgres/1_variants_unique_per_flag.up.sql
+++ /dev/null
@@ -1,2 +0,0 @@
-ALTER TABLE variants DROP CONSTRAINT IF EXISTS variants_key_key;
-ALTER TABLE variants ADD UNIQUE(flag_key, key);
diff --git a/config/migrations/postgres/2_segments_match_type.up.sql b/config/migrations/postgres/2_segments_match_type.up.sql
deleted file mode 100644
index c3de722fa3..0000000000
--- a/config/migrations/postgres/2_segments_match_type.up.sql
+++ /dev/null
@@ -1 +0,0 @@
-ALTER TABLE segments ADD COLUMN match_type INTEGER DEFAULT 0 NOT NULL;
diff --git a/config/migrations/postgres/3_variants_attachment.up.sql b/config/migrations/postgres/3_variants_attachment.up.sql
deleted file mode 100644
index 3b6e157a58..0000000000
--- a/config/migrations/postgres/3_variants_attachment.up.sql
+++ /dev/null
@@ -1 +0,0 @@
-ALTER TABLE variants ADD attachment JSONB;
diff --git a/config/migrations/postgres/4_create_table_authentications.up.sql b/config/migrations/postgres/4_create_table_authentications.up.sql
deleted file mode 100644
index b3c4c63fba..0000000000
--- a/config/migrations/postgres/4_create_table_authentications.up.sql
+++ /dev/null
@@ -1,11 +0,0 @@
-CREATE TABLE IF NOT EXISTS authentications (
- id VARCHAR(255) PRIMARY KEY UNIQUE NOT NULL,
- hashed_client_token VARCHAR(255) UNIQUE NOT NULL,
- method INTEGER DEFAULT 0 NOT NULL,
- metadata TEXT,
- expires_at TIMESTAMP,
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
-);
-
-CREATE UNIQUE INDEX hashed_client_token_authentications_index ON authentications (hashed_client_token);
diff --git a/config/migrations/postgres/5_create_table_operation_lock.up.sql b/config/migrations/postgres/5_create_table_operation_lock.up.sql
deleted file mode 100644
index ca9392a10a..0000000000
--- a/config/migrations/postgres/5_create_table_operation_lock.up.sql
+++ /dev/null
@@ -1,6 +0,0 @@
-CREATE TABLE IF NOT EXISTS operation_lock (
- operation VARCHAR(255) PRIMARY KEY UNIQUE NOT NULL,
- version INTEGER DEFAULT 0 NOT NULL,
- last_acquired_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
- acquired_until TIMESTAMP DEFAULT CURRENT_TIMESTAMP
-);
diff --git a/config/migrations/postgres/6_create_namespaces.up.sql b/config/migrations/postgres/6_create_namespaces.up.sql
deleted file mode 100644
index ad983dc8f2..0000000000
--- a/config/migrations/postgres/6_create_namespaces.up.sql
+++ /dev/null
@@ -1,12 +0,0 @@
--- Create namespaces table
-CREATE TABLE IF NOT EXISTS namespaces (
- key VARCHAR(255) PRIMARY KEY UNIQUE NOT NULL,
- name VARCHAR(255) NOT NULL,
- description TEXT NOT NULL,
- protected BOOLEAN DEFAULT FALSE NOT NULL,
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
-);
-
--- Create default namespace
-INSERT INTO namespaces (key, name, description, protected) VALUES ('default', 'Default', 'Default namespace', true);
diff --git a/config/migrations/postgres/7_namespaces_relationships.up.sql b/config/migrations/postgres/7_namespaces_relationships.up.sql
deleted file mode 100644
index dd2216210a..0000000000
--- a/config/migrations/postgres/7_namespaces_relationships.up.sql
+++ /dev/null
@@ -1,70 +0,0 @@
--- Flags
-----------------
-
--- Add column namespace_key with a default value
-ALTER TABLE flags ADD COLUMN namespace_key VARCHAR(255) NOT NULL DEFAULT 'default';
-
--- Add foreign key constraint on namespace_key column referencing key column of namespaces table
-ALTER TABLE flags ADD FOREIGN KEY (namespace_key) REFERENCES namespaces(key) ON DELETE CASCADE;
-
--- Drop primary key constraint and add a new composite primary key on namespace_key and key columns
-ALTER TABLE flags DROP CONSTRAINT IF EXISTS flags_pkey CASCADE;
-ALTER TABLE flags ADD CONSTRAINT flags_pkey PRIMARY KEY (namespace_key, key);
-
--- Variants
-----------------
-
--- Add column namespace_key with a default value
-ALTER TABLE variants ADD COLUMN namespace_key VARCHAR(255) NOT NULL DEFAULT 'default';
-
--- Drop previously created unique index
-ALTER TABLE variants DROP CONSTRAINT IF EXISTS variants_flag_key_key_key CASCADE;
-
--- Add unique index on namespace_key, flag_key and key columns
-ALTER TABLE variants ADD CONSTRAINT variants_namespace_flag_key UNIQUE (namespace_key, flag_key, key);
-
--- Add foreign key constraint on namespace_key column referencing key column of namespaces table
-ALTER TABLE variants ADD FOREIGN KEY (namespace_key) REFERENCES namespaces(key) ON DELETE CASCADE;
-
--- Add foreign key constraint on namespace_key and flag_key columns referencing namespace_key and key columns of flags table
-ALTER TABLE variants ADD FOREIGN KEY (namespace_key, flag_key) REFERENCES flags(namespace_key, key) ON DELETE CASCADE;
-
--- Segments
-----------------
-
--- Add column namespace_key with a default value
-ALTER TABLE segments ADD COLUMN namespace_key VARCHAR(255) NOT NULL DEFAULT 'default';
-
--- Add foreign key constraint on namespace_key column referencing key column of namespaces table
-ALTER TABLE segments ADD FOREIGN KEY (namespace_key) REFERENCES namespaces(key) ON DELETE CASCADE;
-
--- Drop primary key constraint and add a new composite primary key on namespace_key and key columns
-ALTER TABLE segments DROP CONSTRAINT IF EXISTS segments_pkey CASCADE;
-ALTER TABLE segments ADD CONSTRAINT segments_pkey PRIMARY KEY (namespace_key, key);
-
--- Constraints
-----------------
-
--- Add column namespace_key with a default value
-ALTER TABLE constraints ADD COLUMN namespace_key VARCHAR(255) NOT NULL DEFAULT 'default';
-
--- Add foreign key constraint on namespace_key column referencing key column of namespaces table
-ALTER TABLE constraints ADD FOREIGN KEY (namespace_key) REFERENCES namespaces(key) ON DELETE CASCADE;
-
--- Add foreign key constraint on namespace_key and segment_key columns referencing namespace_key and key columns of segments table
-ALTER TABLE constraints ADD FOREIGN KEY (namespace_key, segment_key) REFERENCES segments(namespace_key, key) ON DELETE CASCADE;
-
--- Rules
-----------------
-
--- Add column namespace_key with a default value
-ALTER TABLE rules ADD COLUMN namespace_key VARCHAR(255) NOT NULL DEFAULT 'default';
-
--- Add foreign key constraint on namespace_key column referencing key column of namespaces table
-ALTER TABLE rules ADD FOREIGN KEY (namespace_key) REFERENCES namespaces(key) ON DELETE CASCADE;
-
--- Add foreign key constraint on namespace_key and flag_key columns referencing namespace_key and key columns of flags table
-ALTER TABLE rules ADD FOREIGN KEY (namespace_key, flag_key) REFERENCES flags(namespace_key, key) ON DELETE CASCADE;
-
--- Add foreign key constraint on namespace_key and segment_key columns referencing namespace_key and key columns of segments table
-ALTER TABLE rules ADD FOREIGN KEY (namespace_key, segment_key) REFERENCES segments(namespace_key, key) ON DELETE CASCADE;
\ No newline at end of file
diff --git a/config/migrations/postgres/8_constraints_with_description.up.sql b/config/migrations/postgres/8_constraints_with_description.up.sql
deleted file mode 100644
index 4b77d4c5f9..0000000000
--- a/config/migrations/postgres/8_constraints_with_description.up.sql
+++ /dev/null
@@ -1,2 +0,0 @@
--- Add description column to constraints
-ALTER TABLE constraints ADD COLUMN description TEXT;
\ No newline at end of file
diff --git a/config/migrations/postgres/9_flag_type.up.sql b/config/migrations/postgres/9_flag_type.up.sql
deleted file mode 100644
index f7f32f1fe9..0000000000
--- a/config/migrations/postgres/9_flag_type.up.sql
+++ /dev/null
@@ -1 +0,0 @@
-ALTER TABLE flags ADD COLUMN type INTEGER DEFAULT 0 NOT NULL;
\ No newline at end of file
diff --git a/config/migrations/sqlite3/0_initial.up.sql b/config/migrations/sqlite3/0_initial.up.sql
deleted file mode 100644
index aed59f4f8a..0000000000
--- a/config/migrations/sqlite3/0_initial.up.sql
+++ /dev/null
@@ -1,55 +0,0 @@
-CREATE TABLE IF NOT EXISTS flags (
- key VARCHAR(255) PRIMARY KEY UNIQUE NOT NULL,
- name VARCHAR(255) NOT NULL,
- description TEXT NOT NULL,
- enabled BOOLEAN DEFAULT FALSE NOT NULL,
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
-);
-
-CREATE TABLE IF NOT EXISTS segments (
- key VARCHAR(255) PRIMARY KEY UNIQUE NOT NULL,
- name VARCHAR(255) NOT NULL,
- description TEXT NOT NULL,
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
-);
-
-CREATE TABLE IF NOT EXISTS variants (
- id VARCHAR(255) PRIMARY KEY UNIQUE NOT NULL,
- flag_key VARCHAR(255) NOT NULL REFERENCES flags ON DELETE CASCADE,
- key VARCHAR(255) UNIQUE NOT NULL,
- name VARCHAR(255) NOT NULL,
- description TEXT NOT NULL,
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
-);
-
-CREATE TABLE IF NOT EXISTS constraints (
- id VARCHAR(255) PRIMARY KEY UNIQUE NOT NULL,
- segment_key VARCHAR(255) NOT NULL REFERENCES segments ON DELETE CASCADE,
- type INTEGER DEFAULT 0 NOT NULL,
- property VARCHAR(255) NOT NULL,
- operator VARCHAR(255) NOT NULL,
- value TEXT NOT NULL,
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
-);
-
-CREATE TABLE IF NOT EXISTS rules (
- id VARCHAR(255) PRIMARY KEY UNIQUE NOT NULL,
- flag_key VARCHAR(255) NOT NULL REFERENCES flags ON DELETE CASCADE,
- segment_key VARCHAR(255) NOT NULL REFERENCES segments ON DELETE CASCADE,
- rank INTEGER DEFAULT 1 NOT NULL,
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
-);
-
-CREATE TABLE IF NOT EXISTS distributions (
- id VARCHAR(255) PRIMARY KEY UNIQUE NOT NULL,
- rule_id VARCHAR(255) NOT NULL REFERENCES rules ON DELETE CASCADE,
- variant_id VARCHAR(255) NOT NULL REFERENCES variants ON DELETE CASCADE,
- rollout float DEFAULT 0 NOT NULL,
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
-);
diff --git a/config/migrations/sqlite3/10_rollouts.up.sql b/config/migrations/sqlite3/10_rollouts.up.sql
deleted file mode 100644
index b09ac83213..0000000000
--- a/config/migrations/sqlite3/10_rollouts.up.sql
+++ /dev/null
@@ -1,28 +0,0 @@
-CREATE TABLE IF NOT EXISTS rollouts (
- id VARCHAR(255) PRIMARY KEY UNIQUE NOT NULL,
- namespace_key VARCHAR(255) NOT NULL REFERENCES namespaces ON DELETE CASCADE,
- flag_key VARCHAR(255) NOT NULL,
- type INTEGER DEFAULT 0 NOT NULL,
- description TEXT NOT NULL,
- rank INTEGER DEFAULT 1 NOT NULL,
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- FOREIGN KEY (namespace_key, flag_key) REFERENCES flags (namespace_key, key) ON DELETE CASCADE
-);
-
-CREATE TABLE IF NOT EXISTS rollout_thresholds (
- id VARCHAR(255) PRIMARY KEY UNIQUE NOT NULL,
- namespace_key VARCHAR(255) NOT NULL REFERENCES namespaces ON DELETE CASCADE,
- rollout_id VARCHAR(255) UNIQUE NOT NULL REFERENCES rollouts ON DELETE CASCADE,
- percentage float DEFAULT 0 NOT NULL,
- value BOOLEAN DEFAULT FALSE NOT NULL
-);
-
-CREATE TABLE IF NOT EXISTS rollout_segments (
- id VARCHAR(255) PRIMARY KEY UNIQUE NOT NULL,
- namespace_key VARCHAR(255) NOT NULL REFERENCES namespaces ON DELETE CASCADE,
- rollout_id VARCHAR(255) NOT NULL REFERENCES rollouts ON DELETE CASCADE,
- segment_key VARCHAR(255) NOT NULL,
- value BOOLEAN DEFAULT FALSE NOT NULL,
- FOREIGN KEY (namespace_key, segment_key) REFERENCES segments (namespace_key, key) ON DELETE CASCADE
-);
diff --git a/config/migrations/sqlite3/11_segment_anding_tables.up.sql b/config/migrations/sqlite3/11_segment_anding_tables.up.sql
deleted file mode 100644
index a3411aa0f2..0000000000
--- a/config/migrations/sqlite3/11_segment_anding_tables.up.sql
+++ /dev/null
@@ -1,75 +0,0 @@
--- Rules
-CREATE TABLE IF NOT EXISTS rules_temp (
- id VARCHAR(255) PRIMARY KEY UNIQUE NOT NULL,
- flag_key VARCHAR(255) NOT NULL,
- rank INTEGER DEFAULT 1 NOT NULL,
- segment_operator INTEGER DEFAULT 0 NOT NULL,
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- namespace_key VARCHAR(255) NOT NULL DEFAULT 'default' REFERENCES namespaces ON DELETE CASCADE,
- FOREIGN KEY (namespace_key, flag_key) REFERENCES flags (namespace_key, key) ON DELETE CASCADE
-);
-
-INSERT INTO rules_temp (id, flag_key, rank, created_at, updated_at, namespace_key) SELECT id, flag_key, rank, created_at, updated_at, namespace_key FROM rules;
-
--- Copy data from distributions table to temporary distributions table since distributions depends on rules
-CREATE TABLE distributions_temp (
- id VARCHAR(255) PRIMARY KEY UNIQUE NOT NULL,
- rule_id VARCHAR(255) NOT NULL REFERENCES rules_temp ON DELETE CASCADE,
- variant_id VARCHAR(255) NOT NULL REFERENCES variants ON DELETE CASCADE,
- rollout float DEFAULT 0 NOT NULL,
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
-);
-
-INSERT INTO distributions_temp (id, rule_id, variant_id, rollout, created_at, updated_at)
- SELECT id, rule_id, variant_id, rollout, created_at, updated_at
- FROM distributions;
-
-CREATE TABLE IF NOT EXISTS rule_segments (
- rule_id VARCHAR(255) NOT NULL REFERENCES rules_temp ON DELETE CASCADE,
- namespace_key VARCHAR(255) NOT NULL,
- segment_key VARCHAR(255) NOT NULL,
- UNIQUE (rule_id, namespace_key, segment_key),
- FOREIGN KEY (namespace_key, segment_key) REFERENCES segments (namespace_key, key) ON DELETE CASCADE
-);
-
-INSERT INTO rule_segments (rule_id, namespace_key, segment_key) SELECT id AS rule_id, namespace_key, segment_key FROM rules;
-
--- Rollouts
-CREATE TABLE IF NOT EXISTS rollout_segments_temp (
- id VARCHAR(255) PRIMARY KEY UNIQUE NOT NULL,
- rollout_id VARCHAR(255) NOT NULL REFERENCES rollouts ON DELETE CASCADE,
- value BOOLEAN DEFAULT FALSE NOT NULL,
- segment_operator INTEGER DEFAULT 0 NOT NULL
-);
-
-INSERT INTO rollout_segments_temp (id, rollout_id, value) SELECT id, rollout_id, value FROM rollout_segments;
-
-CREATE TABLE IF NOT EXISTS rollout_segment_references (
- rollout_segment_id VARCHAR(255) NOT NULL REFERENCES rollout_segments_temp ON DELETE CASCADE,
- namespace_key VARCHAR(255) NOT NULL,
- segment_key VARCHAR(255) NOT NULL,
- UNIQUE (rollout_segment_id, namespace_key, segment_key),
- FOREIGN KEY (namespace_key, segment_key) REFERENCES segments (namespace_key, key) ON DELETE CASCADE
-);
-
-INSERT INTO rollout_segment_references (rollout_segment_id, namespace_key, segment_key) SELECT id AS rollout_segment_id, namespace_key, segment_key FROM rollout_segments;
-
--- Drop old rules table
-DROP TABLE rules;
-
--- Rename temporary rules table to rules
-ALTER TABLE rules_temp RENAME TO rules;
-
--- Drop old rollout_segments table
-DROP TABLE rollout_segments;
-
--- Rename temporary rollout_segments table to rollout_segments
-ALTER TABLE rollout_segments_temp RENAME TO rollout_segments;
-
--- Drop distributions table
-DROP TABLE distributions;
-
--- Rename distributions
-ALTER TABLE distributions_temp RENAME TO distributions;
\ No newline at end of file
diff --git a/config/migrations/sqlite3/12_namespaces_add_state_modified_at.up.sql b/config/migrations/sqlite3/12_namespaces_add_state_modified_at.up.sql
deleted file mode 100644
index 1d8bb69620..0000000000
--- a/config/migrations/sqlite3/12_namespaces_add_state_modified_at.up.sql
+++ /dev/null
@@ -1 +0,0 @@
-ALTER TABLE namespaces ADD COLUMN state_modified_at TIMESTAMP;
diff --git a/config/migrations/sqlite3/13_default_variant.up.sql b/config/migrations/sqlite3/13_default_variant.up.sql
deleted file mode 100644
index a99b270954..0000000000
--- a/config/migrations/sqlite3/13_default_variant.up.sql
+++ /dev/null
@@ -1 +0,0 @@
-ALTER TABLE `flags` ADD COLUMN `default_variant_id` VARCHAR(255) REFERENCES variants(`id`) ON DELETE SET NULL;
\ No newline at end of file
diff --git a/config/migrations/sqlite3/14_flag_metadata.up.sql b/config/migrations/sqlite3/14_flag_metadata.up.sql
deleted file mode 100644
index ac21fc53a7..0000000000
--- a/config/migrations/sqlite3/14_flag_metadata.up.sql
+++ /dev/null
@@ -1 +0,0 @@
-ALTER TABLE flags ADD COLUMN metadata JSON;
diff --git a/config/migrations/sqlite3/15_segment_foreign_keys.up.sql b/config/migrations/sqlite3/15_segment_foreign_keys.up.sql
deleted file mode 100644
index 5dcd5af306..0000000000
--- a/config/migrations/sqlite3/15_segment_foreign_keys.up.sql
+++ /dev/null
@@ -1,25 +0,0 @@
--- rule_segments
-CREATE TABLE rule_segments_temp (
- rule_id VARCHAR(255) NOT NULL REFERENCES rules ON DELETE CASCADE,
- namespace_key VARCHAR(255) NOT NULL,
- segment_key VARCHAR(255) NOT NULL,
- UNIQUE (rule_id, namespace_key, segment_key),
- FOREIGN KEY (namespace_key, segment_key) REFERENCES segments (namespace_key, key) ON DELETE RESTRICT
-);
-
-INSERT INTO rule_segments_temp (rule_id, namespace_key, segment_key) SELECT rule_id, namespace_key, segment_key FROM rule_segments;
-DROP TABLE rule_segments;
-ALTER TABLE rule_segments_temp RENAME TO rule_segments;
-
--- rollout_segment_references
-CREATE TABLE rollout_segment_references_temp (
- rollout_segment_id VARCHAR(255) NOT NULL REFERENCES rollout_segments ON DELETE CASCADE,
- namespace_key VARCHAR(255) NOT NULL,
- segment_key VARCHAR(255) NOT NULL,
- UNIQUE (rollout_segment_id, namespace_key, segment_key),
- FOREIGN KEY (namespace_key, segment_key) REFERENCES segments (namespace_key, key) ON DELETE RESTRICT
-);
-
-INSERT INTO rollout_segment_references_temp (rollout_segment_id, namespace_key, segment_key) SELECT rollout_segment_id, namespace_key, segment_key FROM rollout_segment_references;
-DROP TABLE rollout_segment_references;
-ALTER TABLE rollout_segment_references_temp RENAME TO rollout_segment_references;
diff --git a/config/migrations/sqlite3/1_variants_unique_per_flag.up.sql b/config/migrations/sqlite3/1_variants_unique_per_flag.up.sql
deleted file mode 100644
index a442e362e4..0000000000
--- a/config/migrations/sqlite3/1_variants_unique_per_flag.up.sql
+++ /dev/null
@@ -1,28 +0,0 @@
-/* SQLite doesn't allow you to drop unique constraints with ALTER TABLE
- so we have to create a new table with the schema we want and copy the data over.
- https://www.sqlite.org/lang_altertable.html
-*/
-
-PRAGMA foreign_keys=off;
-
-CREATE TABLE variants_temp
-(
- id VARCHAR(255) PRIMARY KEY UNIQUE NOT NULL,
- flag_key VARCHAR(255) NOT NULL REFERENCES flags ON DELETE CASCADE,
- key VARCHAR(255) NOT NULL,
- name VARCHAR(255) NOT NULL,
- description TEXT NOT NULL,
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- UNIQUE (flag_key, key)
-);
-
-INSERT INTO variants_temp (id, flag_key, key, name, description, created_at, updated_at)
- SELECT id, flag_key, key, name, description, created_at, updated_at
- FROM variants;
-
-DROP TABLE variants;
-
-ALTER TABLE variants_temp RENAME TO variants;
-
-PRAGMA foreign_keys=on;
diff --git a/config/migrations/sqlite3/2_segments_match_type.up.sql b/config/migrations/sqlite3/2_segments_match_type.up.sql
deleted file mode 100644
index c3de722fa3..0000000000
--- a/config/migrations/sqlite3/2_segments_match_type.up.sql
+++ /dev/null
@@ -1 +0,0 @@
-ALTER TABLE segments ADD COLUMN match_type INTEGER DEFAULT 0 NOT NULL;
diff --git a/config/migrations/sqlite3/3_variants_attachment.up.sql b/config/migrations/sqlite3/3_variants_attachment.up.sql
deleted file mode 100644
index b8b5a58a97..0000000000
--- a/config/migrations/sqlite3/3_variants_attachment.up.sql
+++ /dev/null
@@ -1 +0,0 @@
-ALTER TABLE variants ADD COLUMN attachment TEXT AFTER description;
diff --git a/config/migrations/sqlite3/4_create_table_authentications.up.sql b/config/migrations/sqlite3/4_create_table_authentications.up.sql
deleted file mode 100644
index b3c4c63fba..0000000000
--- a/config/migrations/sqlite3/4_create_table_authentications.up.sql
+++ /dev/null
@@ -1,11 +0,0 @@
-CREATE TABLE IF NOT EXISTS authentications (
- id VARCHAR(255) PRIMARY KEY UNIQUE NOT NULL,
- hashed_client_token VARCHAR(255) UNIQUE NOT NULL,
- method INTEGER DEFAULT 0 NOT NULL,
- metadata TEXT,
- expires_at TIMESTAMP,
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
-);
-
-CREATE UNIQUE INDEX hashed_client_token_authentications_index ON authentications (hashed_client_token);
diff --git a/config/migrations/sqlite3/5_create_table_operation_lock.up.sql b/config/migrations/sqlite3/5_create_table_operation_lock.up.sql
deleted file mode 100644
index ca9392a10a..0000000000
--- a/config/migrations/sqlite3/5_create_table_operation_lock.up.sql
+++ /dev/null
@@ -1,6 +0,0 @@
-CREATE TABLE IF NOT EXISTS operation_lock (
- operation VARCHAR(255) PRIMARY KEY UNIQUE NOT NULL,
- version INTEGER DEFAULT 0 NOT NULL,
- last_acquired_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
- acquired_until TIMESTAMP DEFAULT CURRENT_TIMESTAMP
-);
diff --git a/config/migrations/sqlite3/6_create_namespaces.up.sql b/config/migrations/sqlite3/6_create_namespaces.up.sql
deleted file mode 100644
index f63b307aff..0000000000
--- a/config/migrations/sqlite3/6_create_namespaces.up.sql
+++ /dev/null
@@ -1,12 +0,0 @@
--- Create namespaces table
-CREATE TABLE IF NOT EXISTS namespaces (
- key VARCHAR(255) PRIMARY KEY UNIQUE NOT NULL,
- name VARCHAR(255) NOT NULL,
- description TEXT NOT NULL,
- protected BOOLEAN DEFAULT FALSE NOT NULL,
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
-);
-
--- Create default namespace
-INSERT INTO namespaces (key, name, description, protected) VALUES ('default', 'Default', 'Default namespace', true);
\ No newline at end of file
diff --git a/config/migrations/sqlite3/7_namespaces_relationships.up.sql b/config/migrations/sqlite3/7_namespaces_relationships.up.sql
deleted file mode 100644
index e04e6d45e6..0000000000
--- a/config/migrations/sqlite3/7_namespaces_relationships.up.sql
+++ /dev/null
@@ -1,134 +0,0 @@
--- Create temporary flags table
-CREATE TABLE IF NOT EXISTS flags_temp (
- key VARCHAR(255) NOT NULL,
- name VARCHAR(255) NOT NULL,
- description TEXT NOT NULL,
- enabled BOOLEAN DEFAULT FALSE NOT NULL,
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- namespace_key VARCHAR(255) NOT NULL DEFAULT 'default' REFERENCES namespaces ON DELETE CASCADE,
- PRIMARY KEY (namespace_key, key)
-);
-
--- Copy data from flags table to temporary flags table
-INSERT INTO flags_temp (key, name, description, enabled, created_at, updated_at)
- SELECT key, name, description, enabled, created_at, updated_at
- FROM flags;
-
--- Create temporary variants table
-CREATE TABLE IF NOT EXISTS variants_temp (
- id VARCHAR(255) PRIMARY KEY UNIQUE NOT NULL,
- flag_key VARCHAR(255) NOT NULL,
- key VARCHAR(255) NOT NULL,
- name VARCHAR(255) NOT NULL,
- description TEXT NOT NULL,
- attachment TEXT,
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- namespace_key VARCHAR(255) NOT NULL DEFAULT 'default' REFERENCES namespaces ON DELETE CASCADE,
- UNIQUE (namespace_key, flag_key, key),
- FOREIGN KEY (namespace_key, flag_key) REFERENCES flags_temp (namespace_key, key) ON DELETE CASCADE
-);
-
--- Copy data from variants table to temporary variants table
-INSERT INTO variants_temp (id, flag_key, key, name, description, attachment, created_at, updated_at)
- SELECT id, flag_key, key, name, description, attachment, created_at, updated_at
- FROM variants;
-
--- Create temporary segments table
-CREATE TABLE IF NOT EXISTS segments_temp (
- key VARCHAR(255) NOT NULL,
- name VARCHAR(255) NOT NULL,
- description TEXT NOT NULL,
- match_type INTEGER DEFAULT 0 NOT NULL,
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- namespace_key VARCHAR(255) NOT NULL DEFAULT 'default' REFERENCES namespaces ON DELETE CASCADE,
- PRIMARY KEY (namespace_key, key)
-);
-
--- Copy data from segments table to temporary segments table
-INSERT INTO segments_temp (key, name, description, match_type, created_at, updated_at)
- SELECT key, name, description, match_type, created_at, updated_at
- FROM segments;
-
--- Create temporary constraints table
-CREATE TABLE IF NOT EXISTS constraints_temp (
- id VARCHAR(255) PRIMARY KEY UNIQUE NOT NULL,
- segment_key VARCHAR(255) NOT NULL,
- type INTEGER DEFAULT 0 NOT NULL,
- property VARCHAR(255) NOT NULL,
- operator VARCHAR(255) NOT NULL,
- value TEXT NOT NULL,
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- namespace_key VARCHAR(255) NOT NULL DEFAULT 'default' REFERENCES namespaces ON DELETE CASCADE,
- FOREIGN KEY (namespace_key, segment_key) REFERENCES segments_temp (namespace_key, key) ON DELETE CASCADE
-);
-
--- Copy data from constraints table to temporary constraints table
-INSERT INTO constraints_temp (id, segment_key, type, property, operator, value, created_at, updated_at)
- SELECT id, segment_key, type, property, operator, value, created_at, updated_at
- FROM constraints;
-
--- Create temporary rules table
-CREATE TABLE IF NOT EXISTS rules_temp (
- id VARCHAR(255) PRIMARY KEY UNIQUE NOT NULL,
- flag_key VARCHAR(255) NOT NULL,
- segment_key VARCHAR(255) NOT NULL,
- rank INTEGER DEFAULT 1 NOT NULL,
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- namespace_key VARCHAR(255) NOT NULL DEFAULT 'default' REFERENCES namespaces ON DELETE CASCADE,
- FOREIGN KEY (namespace_key, flag_key) REFERENCES flags_temp (namespace_key, key) ON DELETE CASCADE,
- FOREIGN KEY (namespace_key, segment_key) REFERENCES segments_temp (namespace_key, key) ON DELETE CASCADE
-);
-
--- Copy data from rules table to temporary rules table
-INSERT INTO rules_temp (id, flag_key, segment_key, rank, created_at, updated_at)
- SELECT id, flag_key, segment_key, rank, created_at, updated_at
- FROM rules;
-
--- Copy data from distributions table to temporary distributions table
-CREATE TABLE distributions_temp (
- id VARCHAR(255) PRIMARY KEY UNIQUE NOT NULL,
- rule_id VARCHAR(255) NOT NULL REFERENCES rules_temp ON DELETE CASCADE,
- variant_id VARCHAR(255) NOT NULL REFERENCES variants_temp ON DELETE CASCADE,
- rollout float DEFAULT 0 NOT NULL,
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
- updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
-);
-
-INSERT INTO distributions_temp (id, rule_id, variant_id, rollout, created_at, updated_at)
- SELECT id, rule_id, variant_id, rollout, created_at, updated_at
- FROM distributions;
-
--- Drop old distributions table
-DROP TABLE distributions;
--- Rename temporary distributions table to distributions
-ALTER TABLE distributions_temp RENAME TO distributions;
-
--- Drop old rules table
-DROP TABLE rules;
--- Rename temporary rules table to rules
-ALTER TABLE rules_temp RENAME TO rules;
-
--- Drop old flags table
-DROP TABLE flags;
--- Rename temporary flags table to flags
-ALTER TABLE flags_temp RENAME TO flags;
-
--- Drop old variants table
-DROP TABLE variants;
--- Rename temporary variants table to variants
-ALTER TABLE variants_temp RENAME TO variants;
-
--- Drop old segments table
-DROP TABLE segments;
--- Rename temporary segments table to segments
-ALTER TABLE segments_temp RENAME TO segments;
-
--- Drop old constraints table
-DROP TABLE constraints;
--- Rename temporary constraints table to constraints
-ALTER TABLE constraints_temp RENAME TO constraints;
diff --git a/config/migrations/sqlite3/8_constraints_with_description.up.sql b/config/migrations/sqlite3/8_constraints_with_description.up.sql
deleted file mode 100644
index 4b77d4c5f9..0000000000
--- a/config/migrations/sqlite3/8_constraints_with_description.up.sql
+++ /dev/null
@@ -1,2 +0,0 @@
--- Add description column to constraints
-ALTER TABLE constraints ADD COLUMN description TEXT;
\ No newline at end of file
diff --git a/config/migrations/sqlite3/9_flag_type.up.sql b/config/migrations/sqlite3/9_flag_type.up.sql
deleted file mode 100644
index 969aa28f7b..0000000000
--- a/config/migrations/sqlite3/9_flag_type.up.sql
+++ /dev/null
@@ -1 +0,0 @@
-ALTER TABLE flags ADD COLUMN `type` INTEGER DEFAULT 0 NOT NULL;
\ No newline at end of file
diff --git a/examples/database/README.md b/examples/database/README.md
deleted file mode 100644
index c744062f7b..0000000000
--- a/examples/database/README.md
+++ /dev/null
@@ -1,12 +0,0 @@
-# Database Examples
-
-This directory contains examples of how to setup Flipt to use different databases.
-
-For more information on the different database configurations, see the [Storage](https://www.flipt.io/docs/configuration/storage) documentation.
-
-## Contents
-
-* [CockroachDB Example](cockroachdb/README.md)
-* [MySQL Example](mysql/README.md)
-* [PostgreSQL Example](postgres/README.md)
-* [LibSQL Example](libsql/README.md)
diff --git a/examples/database/cockroachdb/README.md b/examples/database/cockroachdb/README.md
deleted file mode 100644
index 577235e89b..0000000000
--- a/examples/database/cockroachdb/README.md
+++ /dev/null
@@ -1,25 +0,0 @@
-
-
-
-
-# CockroachDB Example
-
-This example shows how you can run Flipt with a CockroachDB database over the default SQLite.
-
-This works by setting the environment variable `FLIPT_DB_URL` to point to the CockroachDB database running in a container:
-
-```bash
-FLIPT_DB_URL=cockroach://root@crdb:26257/defaultdb?sslmode=disable
-```
-
-## Requirements
-
-To run this example application you'll need:
-
-* [Docker](https://docs.docker.com/install/)
-* [docker-compose](https://docs.docker.com/compose/install/)
-
-## Running the Example
-
-1. Run `docker compose up` from this directory
-1. Open the Flipt UI (default: [http://localhost:8080](http://localhost:8080))
diff --git a/examples/database/cockroachdb/docker-compose.yml b/examples/database/cockroachdb/docker-compose.yml
deleted file mode 100644
index 9eb744b875..0000000000
--- a/examples/database/cockroachdb/docker-compose.yml
+++ /dev/null
@@ -1,35 +0,0 @@
-version: "3"
-
-services:
- crdb:
- image: cockroachdb/cockroach:latest-v24.2
- networks:
- - flipt_network
- ports:
- - "26257:26257"
- command: start-single-node --insecure --accept-sql-without-tls
- volumes:
- - "${PWD}/data:/cockroach/cockroach-data"
- healthcheck:
- test: ["CMD", "curl", "-f", "http://localhost:8080/health?ready=1"]
- interval: 3s
- timeout: 3s
- retries: 5
-
- flipt:
- image: flipt/flipt:latest
- depends_on:
- crdb:
- condition: service_healthy
- ports:
- - "8080:8080"
- networks:
- - flipt_network
- environment:
- - FLIPT_DB_URL=cockroach://root@crdb:26257/defaultdb?sslmode=disable
- - FLIPT_LOG_LEVEL=debug
- - FLIPT_META_TELEMETRY_ENABLED=false
- command: ["/flipt", "--force-migrate"]
-
-networks:
- flipt_network:
diff --git a/examples/database/libsql/README.md b/examples/database/libsql/README.md
deleted file mode 100644
index 0ca461d39c..0000000000
--- a/examples/database/libsql/README.md
+++ /dev/null
@@ -1,32 +0,0 @@
-# LibSQL
-
-[LibSQL](https://github.com/tursodatabase/libsql) was created as a fork of SQLite by [Turso](https://turso.tech/) to fit some use cases that SQLite was not originally designed for.
-
-It's fully compatible with the SQLite API, and has the added benefit of being run behind an HTTP interface in a service called [sqld](https://github.com/tursodatabase/libsql/tree/main/libsql-server).
-
-## Requirements
-
-- [Docker](https://www.docker.com/)
-- [docker-compose](https://docs.docker.com/compose/install/)
-
-## Running the Example
-
-1. Run `docker-compose up` from this directory
-1. Open the `flipt-one` UI ([http://localhost:8080](http://localhost:8080)) or `flipt-two` UI ([http://localhost:8081](http://localhost:8081))
-1. Create a new feature flag in either UI
-1. Switch to the other UI and verify that the feature flag was replicated
-1. Continue to modify data in either UI and verify that the data is replicated
-
-## Details
-
-`docker compose` will spin up two instances of Flipt (named both `flipt-one` and `flipt-two`). We also spin up two instances of `sqld` called `sqld-primary` and `sqld-replica`. All writes will be directed to the `sqld-primary` and data will be replicated to the `sqld-replica` per semantics of `sqld`.
-
-
-
-> The diagram above was taken from the [libsql](https://github.com/tursodatabase/libsql) repository itself, but gives a nice overview on how all the concepts mesh together.
-
-## Data
-
-Since we mount the directories `/tmp/data.db` and `/tmp/replica.db` as volumes to the `sqld` Docker containers, you can explore the data on the host using the [sqlite3](https://www.sqlite.org/download.html) CLI.
-
-The data will live under `/tmp/data.db/dbs/default/data` for the `sqld-primary` instance and `/tmp/replica.db/dbs/default/data` for the `sqld-replica` instance.
diff --git a/examples/database/libsql/docker-compose.yml b/examples/database/libsql/docker-compose.yml
deleted file mode 100644
index 4d3523cf78..0000000000
--- a/examples/database/libsql/docker-compose.yml
+++ /dev/null
@@ -1,42 +0,0 @@
-version: "3"
-services:
- sqld-primary:
- image: ghcr.io/libsql/sqld:main
- platform: linux/amd64
- environment:
- - SQLD_NODE=primary
- - SQLD_HTTP_LISTEN_ADDR=0.0.0.0:8000
- - SQLD_GRPC_LISTEN_ADDR=0.0.0.0:5000
- volumes:
- - "/tmp/data.db:/var/lib/sqld/iku.db"
-
- sqld-replica:
- image: ghcr.io/libsql/sqld:main
- platform: linux/amd64
- depends_on:
- - sqld-primary
- environment:
- - SQLD_NODE=replica
- - SQLD_PRIMARY_URL=http://sqld-primary:5000
- volumes:
- - "/tmp/replica.db:/var/lib/sqld/iku.db"
-
- flipt-one:
- image: flipt/flipt:nightly
- depends_on:
- - sqld-primary
- environment:
- - FLIPT_DB_URL=http://sqld-primary:8000
- restart: on-failure
- ports:
- - "8080:8080"
-
- flipt-two:
- image: flipt/flipt:nightly
- depends_on:
- - sqld-replica
- restart: on-failure
- environment:
- - FLIPT_DB_URL=http://sqld-replica:8080
- ports:
- - "8081:8080"
diff --git a/examples/database/libsql/images/sqld-overview.png b/examples/database/libsql/images/sqld-overview.png
deleted file mode 100644
index eaf56c4dac..0000000000
Binary files a/examples/database/libsql/images/sqld-overview.png and /dev/null differ
diff --git a/examples/database/litefs/Dockerfile b/examples/database/litefs/Dockerfile
deleted file mode 100644
index 4afde41454..0000000000
--- a/examples/database/litefs/Dockerfile
+++ /dev/null
@@ -1,16 +0,0 @@
-FROM alpine
-
-COPY --from=flyio/litefs:0.5 /usr/local/bin/litefs /usr/local/bin/litefs
-COPY --from=flipt/flipt:latest /flipt /usr/local/bin/flipt
-
-RUN mkdir -p /etc/flipt/config && \
- mkdir -p /var/opt/flipt
-
-COPY --from=flipt/flipt:latest /etc/flipt/config/default.yml /etc/flipt/config/default.yml
-RUN chown -R root:root /var/opt/flipt /etc/flipt
-
-ADD litefs.yml /etc/litefs.yml
-
-RUN apk add bash fuse3 sqlite ca-certificates
-
-ENTRYPOINT litefs mount
diff --git a/examples/database/litefs/README.md b/examples/database/litefs/README.md
deleted file mode 100644
index ee975a8aaf..0000000000
--- a/examples/database/litefs/README.md
+++ /dev/null
@@ -1,25 +0,0 @@
-# LiteFS
-
-LiteFS is a distributed file system that replicates SQLite databases to other nodes at the file system level. It works by using [FUSE](https://www.kernel.org/doc/html/next/filesystems/fuse.html) to detect writes to the file system and determines how and if those should be replicated.
-
-This example will demonstrate how to run Flipt over LiteFS.
-
-## Requirements
-
-- [Docker](https://www.docker.com/)
-- [docker-compose](https://docs.docker.com/compose/install/)
-
-## Running the Example
-
-1. Run `docker-compose up` from this directory
-1. Open the Flipt UI (default: [http://localhost:8080](http://localhost:8080))
-
-## Details
-
-`docker compose` will spin up two instances of Flipt with embedded SQLite databases. On top of these instances is an nginx proxy that will forward "write" requests, anything but `GET`, to the primary. `GET` requests will be served from the instance's embedded SQLite database.
-
-LiteFS describes a few [caveats](https://fly.io/docs/litefs/proxy/#how-it-works) that must be kept in mind.
-
-## Data
-
-You can view the data on any one of the instances located at [primary](http://localhost:8081) or the [replica](http://localhost:8082), but remember that writes will only happen on the primary or the nginx proxy, the replica instance will not be able to accpet writes.
diff --git a/examples/database/litefs/docker-compose.yml b/examples/database/litefs/docker-compose.yml
deleted file mode 100644
index f21a55ed75..0000000000
--- a/examples/database/litefs/docker-compose.yml
+++ /dev/null
@@ -1,21 +0,0 @@
-version: '3'
-services:
- nginx:
- build: ./nginx
- ports:
- - "8080:80"
- primary:
- build: .
- privileged: true
- ports:
- - "8081:8080"
- environment:
- IS_PRIMARY: "true"
- replica:
- build: .
- ports:
- - "8082:8080"
- restart: on-failure
- privileged: true
- environment:
- IS_PRIMARY: "false"
diff --git a/examples/database/litefs/litefs.yml b/examples/database/litefs/litefs.yml
deleted file mode 100644
index 97076e3348..0000000000
--- a/examples/database/litefs/litefs.yml
+++ /dev/null
@@ -1,18 +0,0 @@
-log:
- debug: true
-
-fuse:
- dir: "/var/opt/flipt"
-
-data:
- dir: "/var/lib/litefs"
-
-lease:
- type: "static"
-
- advertise-url: "http://primary:20202"
-
- candidate: $IS_PRIMARY
-
-exec:
- - cmd: "flipt"
diff --git a/examples/database/litefs/nginx/Dockerfile b/examples/database/litefs/nginx/Dockerfile
deleted file mode 100644
index 5986f479e0..0000000000
--- a/examples/database/litefs/nginx/Dockerfile
+++ /dev/null
@@ -1,5 +0,0 @@
-FROM nginx:alpine
-
-COPY ./nginx.conf /etc/nginx/nginx.conf
-EXPOSE 80
-CMD ["nginx", "-g", "daemon off;"]
diff --git a/examples/database/litefs/nginx/nginx.conf b/examples/database/litefs/nginx/nginx.conf
deleted file mode 100644
index a315313f9c..0000000000
--- a/examples/database/litefs/nginx/nginx.conf
+++ /dev/null
@@ -1,22 +0,0 @@
-http {
- upstream primary {
- server primary:8080;
- }
- upstream all {
- server primary:8080;
- server replica:8080;
- }
- server {
- listen 80;
- location / {
- # Basically all writes should go to this primary,
- # while reads go to everything else.
- if ($request_method ~ "(PUT|POST|PATCH|DELETE)") {
- proxy_pass http://primary;
- }
- proxy_pass http://all;
- }
- }
-}
-
-events { }
diff --git a/examples/database/mysql/README.md b/examples/database/mysql/README.md
deleted file mode 100644
index b38e5da688..0000000000
--- a/examples/database/mysql/README.md
+++ /dev/null
@@ -1,25 +0,0 @@
-
-
-
-
-# MySQL Example
-
-This example shows how you can run Flipt with a MySQL database over the default SQLite.
-
-This works by setting the environment variable `FLIPT_DB_URL` to point to the MySQL database running in a container:
-
-```bash
-FLIPT_DB_URL=mysql://mysql:password@mysql:3306/flipt
-```
-
-## Requirements
-
-To run this example application you'll need:
-
-* [Docker](https://docs.docker.com/install/)
-* [docker-compose](https://docs.docker.com/compose/install/)
-
-## Running the Example
-
-1. Run `docker compose up` from this directory
-1. Open the Flipt UI (default: [http://localhost:8080](http://localhost:8080))
diff --git a/examples/database/mysql/docker-compose.yml b/examples/database/mysql/docker-compose.yml
deleted file mode 100644
index 6494306de6..0000000000
--- a/examples/database/mysql/docker-compose.yml
+++ /dev/null
@@ -1,34 +0,0 @@
-version: "3"
-
-services:
- mysql:
- image: mysql:latest
- networks:
- - flipt_network
- environment:
- - MYSQL_DATABASE=flipt
- - MYSQL_USER=mysql
- - MYSQL_PASSWORD=password
- - MYSQL_ALLOW_EMPTY_PASSWORD=true
- healthcheck:
- test: ["CMD", "mysqladmin", "ping", "-h", "localhost"]
- timeout: 5s
- retries: 5
-
- flipt:
- image: flipt/flipt:latest
- depends_on:
- mysql:
- condition: service_healthy
- ports:
- - "8080:8080"
- networks:
- - flipt_network
- environment:
- - FLIPT_DB_URL=mysql://mysql:password@mysql:3306/flipt
- - FLIPT_LOG_LEVEL=debug
- - FLIPT_META_TELEMETRY_ENABLED=false
- command: ["/flipt", "--force-migrate"]
-
-networks:
- flipt_network:
diff --git a/examples/database/postgres/README.md b/examples/database/postgres/README.md
deleted file mode 100644
index 82212e25d5..0000000000
--- a/examples/database/postgres/README.md
+++ /dev/null
@@ -1,25 +0,0 @@
-
-
-
-
-# Postgres Example
-
-This example shows how you can run Flipt with a Postgres database over the default SQLite.
-
-This works by setting the environment variable `FLIPT_DB_URL` to point to the Postgres database running in a container:
-
-```bash
-FLIPT_DB_URL=postgres://postgres:password@postgres:5432/flipt?sslmode=disable
-```
-
-## Requirements
-
-To run this example application you'll need:
-
-* [Docker](https://docs.docker.com/install/)
-* [docker-compose](https://docs.docker.com/compose/install/)
-
-## Running the Example
-
-1. Run `docker compose up` from this directory
-1. Open the Flipt UI (default: [http://localhost:8080](http://localhost:8080))
diff --git a/examples/database/postgres/docker-compose.yml b/examples/database/postgres/docker-compose.yml
deleted file mode 100644
index 08728365be..0000000000
--- a/examples/database/postgres/docker-compose.yml
+++ /dev/null
@@ -1,34 +0,0 @@
-version: "3"
-
-services:
- postgres:
- image: postgres:alpine
- networks:
- - flipt_network
- environment:
- - POSTGRES_DB=flipt
- - POSTGRES_USER=postgres
- - POSTGRES_PASSWORD=password
- healthcheck:
- test: ["CMD-SHELL", "pg_isready"]
- interval: 5s
- timeout: 5s
- retries: 5
-
- flipt:
- image: flipt/flipt:latest
- depends_on:
- postgres:
- condition: service_healthy
- ports:
- - "8080:8080"
- networks:
- - flipt_network
- environment:
- - FLIPT_DB_URL=postgres://postgres:password@postgres:5432/flipt?sslmode=disable
- - FLIPT_LOG_LEVEL=debug
- - FLIPT_META_TELEMETRY_ENABLED=false
- command: ["/flipt", "--force-migrate"]
-
-networks:
- flipt_network:
diff --git a/examples/images/logos/cockroachdb.svg b/examples/images/logos/cockroachdb.svg
deleted file mode 100644
index 6e8eca8e06..0000000000
--- a/examples/images/logos/cockroachdb.svg
+++ /dev/null
@@ -1 +0,0 @@
-CL
\ No newline at end of file
diff --git a/examples/images/logos/mysql.svg b/examples/images/logos/mysql.svg
deleted file mode 100644
index 853c55f150..0000000000
--- a/examples/images/logos/mysql.svg
+++ /dev/null
@@ -1 +0,0 @@
-
\ No newline at end of file
diff --git a/examples/images/logos/sqlite.svg b/examples/images/logos/sqlite.svg
deleted file mode 100644
index 670aa6eb82..0000000000
--- a/examples/images/logos/sqlite.svg
+++ /dev/null
@@ -1 +0,0 @@
-
\ No newline at end of file
diff --git a/go.mod b/go.mod
index cdab1eff15..c529619b56 100644
--- a/go.mod
+++ b/go.mod
@@ -12,11 +12,7 @@ require (
github.com/ClickHouse/clickhouse-go/v2 v2.29.0
github.com/MakeNowJust/heredoc v1.0.0
github.com/Masterminds/semver/v3 v3.3.1
- github.com/Masterminds/squirrel v1.5.4
- github.com/XSAM/otelsql v0.36.0
github.com/aws/aws-sdk-go-v2/config v1.28.6
- github.com/aws/aws-sdk-go-v2/service/ecr v1.38.0
- github.com/aws/aws-sdk-go-v2/service/ecrpublic v1.29.1
github.com/aws/aws-sdk-go-v2/service/s3 v1.71.0
github.com/blang/semver/v4 v4.0.0
github.com/coreos/go-oidc/v3 v3.12.0
@@ -28,12 +24,10 @@ require (
github.com/go-git/go-git/v5 v5.13.1
github.com/go-jose/go-jose/v3 v3.0.3
github.com/go-redis/cache/v9 v9.0.0
- github.com/go-sql-driver/mysql v1.8.1
github.com/gobwas/glob v0.2.3
github.com/golang-migrate/migrate/v4 v4.18.1
github.com/golang/protobuf v1.5.4
github.com/google/go-cmp v0.6.0
- github.com/google/go-containerregistry v0.20.2
github.com/google/go-github/v66 v66.0.0
github.com/google/uuid v1.6.0
github.com/gorilla/csrf v1.7.2
@@ -48,30 +42,23 @@ require (
github.com/hashicorp/go-retryablehttp v0.7.7
github.com/hashicorp/golang-lru/v2 v2.0.7
github.com/iancoleman/strcase v0.3.0
- github.com/jackc/pgx/v5 v5.7.2
github.com/magefile/mage v1.15.0
- github.com/mattn/go-sqlite3 v1.14.24
github.com/mitchellh/mapstructure v1.5.0
github.com/open-policy-agent/contrib/logging/plugins/ozap v0.0.0-20240305195513-547d54e7c251
github.com/open-policy-agent/opa v0.70.0
- github.com/opencontainers/go-digest v1.0.0
- github.com/opencontainers/image-spec v1.1.0
github.com/patrickmn/go-cache v2.1.0+incompatible
github.com/prometheus/client_golang v1.20.5
github.com/prometheus/common v0.62.0
github.com/redis/go-redis/v9 v9.7.0
- github.com/spf13/afero v1.12.0
github.com/spf13/cobra v1.8.1
github.com/spf13/viper v1.19.0
github.com/stretchr/testify v1.10.0
github.com/testcontainers/testcontainers-go v0.35.0
- github.com/tursodatabase/libsql-client-go v0.0.0-20240902231107-85af5b9d094d
github.com/twmb/franz-go v1.18.0
github.com/twmb/franz-go/pkg/kadm v1.14.0
github.com/twmb/franz-go/pkg/sr v1.2.0
github.com/twmb/franz-go/plugin/kzap v1.1.2
github.com/xeipuuv/gojsonschema v1.2.0
- github.com/xo/dburl v0.23.2
go.flipt.io/flipt/core v0.0.0-00010101000000-000000000000
go.flipt.io/flipt/errors v1.45.0
go.flipt.io/flipt/rpc/flipt v1.54.0
@@ -106,7 +93,6 @@ require (
gopkg.in/segmentio/analytics-go.v3 v3.1.0
gopkg.in/yaml.v2 v2.4.0
gopkg.in/yaml.v3 v3.0.1
- oras.land/oras-go/v2 v2.5.0
)
require (
@@ -118,7 +104,6 @@ require (
cloud.google.com/go/iam v1.3.0 // indirect
cloud.google.com/go/monitoring v1.22.0 // indirect
dario.cat/mergo v1.0.0 // indirect
- filippo.io/edwards25519 v1.1.0 // indirect
github.com/AdaLogics/go-fuzz-headers v0.0.0-20230811130428-ced1acdcaa24 // indirect
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.16.0 // indirect
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.0 // indirect
@@ -137,7 +122,6 @@ require (
github.com/ProtonMail/go-crypto v1.1.3 // indirect
github.com/agnivade/levenshtein v1.2.0 // indirect
github.com/andybalholm/brotli v1.1.0 // indirect
- github.com/antlr4-go/antlr/v4 v4.13.1 // indirect
github.com/aws/aws-sdk-go v1.55.5 // indirect
github.com/aws/aws-sdk-go-v2 v1.32.7 // indirect
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.7 // indirect
@@ -164,24 +148,18 @@ require (
github.com/cloudflare/circl v1.3.7 // indirect
github.com/cncf/xds/go v0.0.0-20240905190251-b4127c9b8d78 // indirect
github.com/cockroachdb/apd/v3 v3.2.1 // indirect
- github.com/cockroachdb/cockroach-go/v2 v2.1.1 // indirect
- github.com/coder/websocket v1.8.12 // indirect
github.com/containerd/containerd v1.7.23 // indirect
github.com/containerd/continuity v0.4.3 // indirect
github.com/containerd/errdefs v0.3.0 // indirect
github.com/containerd/log v0.1.0 // indirect
github.com/containerd/platforms v0.2.1 // indirect
- github.com/containerd/stargz-snapshotter/estargz v0.15.1 // indirect
github.com/cpuguy83/dockercfg v0.3.2 // indirect
github.com/cpuguy83/go-md2man/v2 v2.0.5 // indirect
github.com/cyphar/filepath-securejoin v0.3.6 // indirect
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
github.com/distribution/reference v0.6.0 // indirect
- github.com/docker/cli v27.3.1+incompatible // indirect
- github.com/docker/distribution v2.8.3+incompatible // indirect
github.com/docker/docker v27.3.1+incompatible // indirect
- github.com/docker/docker-credential-helpers v0.8.2 // indirect
github.com/docker/go-units v0.5.0 // indirect
github.com/emirpasic/gods v1.18.1 // indirect
github.com/envoyproxy/go-control-plane/envoy v1.32.3 // indirect
@@ -215,10 +193,6 @@ require (
github.com/hashicorp/go-uuid v1.0.3 // indirect
github.com/hashicorp/hcl v1.0.0 // indirect
github.com/inconshreveable/mousetrap v1.1.0 // indirect
- github.com/jackc/pgerrcode v0.0.0-20220416144525-469b46aa5efa // indirect
- github.com/jackc/pgpassfile v1.0.0 // indirect
- github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect
- github.com/jackc/puddle/v2 v2.2.2 // indirect
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect
github.com/jmespath/go-jmespath v0.4.0 // indirect
github.com/json-iterator/go v1.1.12 // indirect
@@ -226,9 +200,6 @@ require (
github.com/kevinburke/ssh_config v1.2.0 // indirect
github.com/klauspost/compress v1.17.11 // indirect
github.com/kylelemons/godebug v1.1.0 // indirect
- github.com/lann/builder v0.0.0-20180802200727-47ae307949d0 // indirect
- github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0 // indirect
- github.com/lib/pq v1.10.9 // indirect
github.com/lufia/plan9stats v0.0.0-20240513124658-fba389f38bae // indirect
github.com/magiconair/properties v1.8.7 // indirect
github.com/mattn/go-colorable v0.1.13 // indirect
@@ -247,6 +218,8 @@ require (
github.com/morikuni/aec v1.0.0 // indirect
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect
github.com/onsi/gomega v1.34.2 // indirect
+ github.com/opencontainers/go-digest v1.0.0 // indirect
+ github.com/opencontainers/image-spec v1.1.0 // indirect
github.com/openzipkin/zipkin-go v0.4.3 // indirect
github.com/paulmach/orb v0.11.1 // indirect
github.com/pelletier/go-toml/v2 v2.2.3 // indirect
@@ -272,6 +245,7 @@ require (
github.com/sirupsen/logrus v1.9.3 // indirect
github.com/skeema/knownhosts v1.3.0 // indirect
github.com/sourcegraph/conc v0.3.0 // indirect
+ github.com/spf13/afero v1.12.0 // indirect
github.com/spf13/cast v1.6.0 // indirect
github.com/spf13/pflag v1.0.5 // indirect
github.com/stretchr/objx v0.5.2 // indirect
@@ -280,7 +254,6 @@ require (
github.com/tklauser/go-sysconf v0.3.14 // indirect
github.com/tklauser/numcpus v0.8.0 // indirect
github.com/twmb/franz-go/pkg/kmsg v1.9.0 // indirect
- github.com/vbatts/tar-split v0.11.6 // indirect
github.com/vmihailenco/go-tinylfu v0.2.2 // indirect
github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect
github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect
@@ -311,6 +284,7 @@ require (
google.golang.org/genproto/googleapis/rpc v0.0.0-20241230172942-26aa7a208def // indirect
gopkg.in/ini.v1 v1.67.0 // indirect
gopkg.in/warnings.v0 v0.1.2 // indirect
+ oras.land/oras-go/v2 v2.5.0 // indirect
sigs.k8s.io/yaml v1.4.0 // indirect
)
diff --git a/go.sum b/go.sum
index 67c3685041..1c629994b8 100644
--- a/go.sum
+++ b/go.sum
@@ -27,8 +27,6 @@ cuelang.org/go v0.11.1 h1:pV+49MX1mmvDm8Qh3Za3M786cty8VKPWzQ1Ho4gZRP0=
cuelang.org/go v0.11.1/go.mod h1:PBY6XvPUswPPJ2inpvUozP9mebDVTXaeehQikhZPBz0=
dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk=
dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk=
-filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA=
-filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4=
github.com/AdaLogics/go-fuzz-headers v0.0.0-20230811130428-ced1acdcaa24 h1:bvDV9vkmnHYOMsOr4WLk+Vo07yKIzd94sVoIqshQ4bU=
github.com/AdaLogics/go-fuzz-headers v0.0.0-20230811130428-ced1acdcaa24/go.mod h1:8o94RPi1/7XTJvwPpRSzSUedZrtlirdB3r9Z20bi2f8=
github.com/AlecAivazis/survey/v2 v2.3.7 h1:6I/u8FvytdGsgonrYsVn2t8t4QiRnh6QSTqkkhIiSjQ=
@@ -74,8 +72,6 @@ github.com/MakeNowJust/heredoc v1.0.0 h1:cXCdzVdstXyiTqTvfqk9SDHpKNjxuom+DOlyEeQ
github.com/MakeNowJust/heredoc v1.0.0/go.mod h1:mG5amYoWBHf8vpLOuehzbGGw0EHxpZZ6lCpQ4fNJ8LE=
github.com/Masterminds/semver/v3 v3.3.1 h1:QtNSWtVZ3nBfk8mAOu/B6v7FMJ+NHTIgUPi7rj+4nv4=
github.com/Masterminds/semver/v3 v3.3.1/go.mod h1:4V+yj/TJE1HU9XfppCwVMZq3I84lprf4nC11bSS5beM=
-github.com/Masterminds/squirrel v1.5.4 h1:uUcX/aBc8O7Fg9kaISIUsHXdKuqehiXAMQTYX8afzqM=
-github.com/Masterminds/squirrel v1.5.4/go.mod h1:NNaOrjSoIDfDA40n7sr2tPNZRfjzjA400rg+riTZj10=
github.com/Microsoft/go-winio v0.5.2/go.mod h1:WpS1mjBmmwHBEWmogvA2mj8546UReBk4v8QkMxJ6pZY=
github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY=
github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU=
@@ -87,16 +83,12 @@ github.com/OneOfOne/xxhash v1.2.8 h1:31czK/TI9sNkxIKfaUfGlU47BAxQ0ztGgd9vPyqimf8
github.com/OneOfOne/xxhash v1.2.8/go.mod h1:eZbhyaAYD41SGSSsnmcpxVoRiQ/MPUTjUdIIOT9Um7Q=
github.com/ProtonMail/go-crypto v1.1.3 h1:nRBOetoydLeUb4nHajyO2bKqMLfWQ/ZPwkXqXxPxCFk=
github.com/ProtonMail/go-crypto v1.1.3/go.mod h1:rA3QumHc/FZ8pAHreoekgiAbzpNsfQAosU5td4SnOrE=
-github.com/XSAM/otelsql v0.36.0 h1:SvrlOd/Hp0ttvI9Hu0FUWtISTTDNhQYwxe8WB4J5zxo=
-github.com/XSAM/otelsql v0.36.0/go.mod h1:fo4M8MU+fCn/jDfu+JwTQ0n6myv4cZ+FU5VxrllIlxY=
github.com/agnivade/levenshtein v1.2.0 h1:U9L4IOT0Y3i0TIlUIDJ7rVUziKi/zPbrJGaFrtYH3SY=
github.com/agnivade/levenshtein v1.2.0/go.mod h1:QVVI16kDrtSuwcpd0p1+xMC6Z/VfhtCyDIjcwga4/DU=
github.com/andybalholm/brotli v1.1.0 h1:eLKJA0d02Lf0mVpIDgYnqXcUn0GqVmEFny3VuID1U3M=
github.com/andybalholm/brotli v1.1.0/go.mod h1:sms7XGricyQI9K10gOSf56VKKWS4oLer58Q+mhRPtnY=
github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be h1:9AeTilPcZAjCFIImctFaOjnTIavg87rW78vTPkQqLI8=
github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be/go.mod h1:ySMOLuWl6zY27l47sB3qLNK6tF2fkHG55UZxx8oIVo4=
-github.com/antlr4-go/antlr/v4 v4.13.1 h1:SqQKkuVZ+zWkMMNkjy5FZe5mr5WURWnlpmOuzYWrPrQ=
-github.com/antlr4-go/antlr/v4 v4.13.1/go.mod h1:GKmUxMtwp6ZgGwZSva4eWPC5mS6vUAmOABFgjdkM7Nw=
github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0 h1:jfIu9sQUG6Ig+0+Ap1h4unLjW6YQJpKZVmUzxsD4E/Q=
github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0/go.mod h1:t2tdKJDJF9BV14lnkjHmOQgcvEKgtqs5a1N3LNdJhGE=
github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio=
@@ -123,10 +115,6 @@ github.com/aws/aws-sdk-go-v2/internal/ini v1.8.1 h1:VaRN3TlFdd6KxX1x3ILT5ynH6HvK
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.1/go.mod h1:FbtygfRFze9usAadmnGJNc8KsP346kEe+y2/oyhGAGc=
github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.25 h1:r67ps7oHCYnflpgDy2LZU0MAQtQbYIOqNNnqGO6xQkE=
github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.25/go.mod h1:GrGY+Q4fIokYLtjCVB/aFfCVL6hhGUFl8inD18fDalE=
-github.com/aws/aws-sdk-go-v2/service/ecr v1.38.0 h1:+1IqznlfeMCgFWoWAuwRqykVc6gGoUUQFGXai+77KWs=
-github.com/aws/aws-sdk-go-v2/service/ecr v1.38.0/go.mod h1:NqKnlZvLl4Tp2UH/GEc/nhbjmPQhwOXmLp2eldiszLM=
-github.com/aws/aws-sdk-go-v2/service/ecrpublic v1.29.1 h1:pD3CFGTKwsB8TFjTohMWz0Qb1PuYpI78vYU8s5yhLx8=
-github.com/aws/aws-sdk-go-v2/service/ecrpublic v1.29.1/go.mod h1:aHMIyHh+6N2w3CY24J9JoV5ADnGuMZ7dnOJTzO0Txik=
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.1 h1:iXtILhvDxB6kPvEXgsDhGaZCSC6LQET5ZHSdJozeI0Y=
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.1/go.mod h1:9nu0fVANtYiAePIBh2/pFUSwtJ402hLnp854CNoDOeE=
github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.4.6 h1:HCpPsWqmYQieU7SS6E9HXfdAMSud0pteVXieJmcpIRI=
@@ -178,13 +166,8 @@ github.com/cloudflare/golz4 v0.0.0-20150217214814-ef862a3cdc58/go.mod h1:EOBUe0h
github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
github.com/cncf/xds/go v0.0.0-20240905190251-b4127c9b8d78 h1:QVw89YDxXxEe+l8gU8ETbOasdwEV+avkR75ZzsVV9WI=
github.com/cncf/xds/go v0.0.0-20240905190251-b4127c9b8d78/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8=
-github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ=
github.com/cockroachdb/apd/v3 v3.2.1 h1:U+8j7t0axsIgvQUqthuNm82HIrYXodOV2iWLWtEaIwg=
github.com/cockroachdb/apd/v3 v3.2.1/go.mod h1:klXJcjp+FffLTHlhIG69tezTDvdP065naDsHzKhYSqc=
-github.com/cockroachdb/cockroach-go/v2 v2.1.1 h1:3XzfSMuUT0wBe1a3o5C0eOTcArhmmFAg2Jzh/7hhKqo=
-github.com/cockroachdb/cockroach-go/v2 v2.1.1/go.mod h1:7NtUnP6eK+l6k483WSYNrq3Kb23bWV10IRV1TyeSpwM=
-github.com/coder/websocket v1.8.12 h1:5bUXkEPPIbewrnkU8LTCLVaxi4N4J8ahufH2vlo4NAo=
-github.com/coder/websocket v1.8.12/go.mod h1:LNVeNrXQZfe5qhS9ALED3uA+l5pPqvwXg3CKoDBB2gs=
github.com/containerd/cgroups v1.1.0 h1:v8rEWFl6EoqHB+swVNjVoCJE8o3jX7e8nqBGPLaDFBM=
github.com/containerd/cgroups/v3 v3.0.3 h1:S5ByHZ/h9PMe5IOQoN7E+nMc2UcLEM/V48DGDJ9kip0=
github.com/containerd/cgroups/v3 v3.0.3/go.mod h1:8HBe7V3aWGLFPd/k03swSIsGjZhHI2WzJmticMgVuz0=
@@ -198,18 +181,13 @@ github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I=
github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo=
github.com/containerd/platforms v0.2.1 h1:zvwtM3rz2YHPQsF2CHYM8+KtB5dvhISiXh5ZpSBQv6A=
github.com/containerd/platforms v0.2.1/go.mod h1:XHCb+2/hzowdiut9rkudds9bE5yJ7npe7dG/wG+uFPw=
-github.com/containerd/stargz-snapshotter/estargz v0.15.1 h1:eXJjw9RbkLFgioVaTG+G/ZW/0kEe2oEKCdS/ZxIyoCU=
-github.com/containerd/stargz-snapshotter/estargz v0.15.1/go.mod h1:gr2RNwukQ/S9Nv33Lt6UC7xEx58C+LHRdoqbEKjz1Kk=
github.com/coreos/go-oidc/v3 v3.12.0 h1:sJk+8G2qq94rDI6ehZ71Bol3oUHy63qNYmkiSjrc/Jo=
github.com/coreos/go-oidc/v3 v3.12.0/go.mod h1:gE3LgjOgFoHi9a4ce4/tJczr0Ai2/BoDhf0r5lltWI0=
-github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=
-github.com/coreos/go-systemd v0.0.0-20190719114852-fd7a80b32e1f/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=
github.com/cpuguy83/dockercfg v0.3.2 h1:DlJTyZGBDlXqUZ2Dk2Q3xHs/FtnooJJVaad2S9GKorA=
github.com/cpuguy83/dockercfg v0.3.2/go.mod h1:sugsbF4//dDlL/i+S+rtpIWp+5h0BHJHfjj5/jFyUJc=
github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/cpuguy83/go-md2man/v2 v2.0.5 h1:ZtcqGrnekaHpVLArFSe4HK5DoKx1T0rq2DwVB0alcyc=
github.com/cpuguy83/go-md2man/v2 v2.0.5/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
-github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY=
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/creack/pty v1.1.17/go.mod h1:MOBLtS5ELjhRRrroQr9kyvTxUAFNvYEK993ew/Vr4O4=
github.com/creack/pty v1.1.18 h1:n56/Zwd5o6whRC5PMGretI4IdRLlmBXYNjScPaBgsbY=
@@ -232,14 +210,8 @@ github.com/dhui/dktest v0.4.3 h1:wquqUxAFdcUgabAVLvSCOKOlag5cIZuaOjYIBOWdsR0=
github.com/dhui/dktest v0.4.3/go.mod h1:zNK8IwktWzQRm6I/l2Wjp7MakiyaFWv4G1hjmodmMTs=
github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk=
github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E=
-github.com/docker/cli v27.3.1+incompatible h1:qEGdFBF3Xu6SCvCYhc7CzaQTlBmqDuzxPDpigSyeKQQ=
-github.com/docker/cli v27.3.1+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8=
-github.com/docker/distribution v2.8.3+incompatible h1:AtKxIZ36LoNK51+Z6RpzLpddBirtxJnzDrHLEKxTAYk=
-github.com/docker/distribution v2.8.3+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w=
github.com/docker/docker v27.3.1+incompatible h1:KttF0XoteNTicmUtBO0L2tP+J7FGRFTjaEF4k6WdhfI=
github.com/docker/docker v27.3.1+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
-github.com/docker/docker-credential-helpers v0.8.2 h1:bX3YxiGzFP5sOXWc3bTPEXdEaZSeVMrFgOr3T+zrFAo=
-github.com/docker/docker-credential-helpers v0.8.2/go.mod h1:P3ci7E3lwkZg6XiHdRKft1KckHiO9a2rNtyFbZ/ry9M=
github.com/docker/go-connections v0.5.0 h1:USnMq7hx7gwdVZq1L49hLXaFtUdTADjXGp+uj1Br63c=
github.com/docker/go-connections v0.5.0/go.mod h1:ov60Kzw0kKElRwhNs9UlUHAE/F9Fe6GLaXnqyDdmEXc=
github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4=
@@ -318,14 +290,10 @@ github.com/go-quicktest/qt v1.101.0 h1:O1K29Txy5P2OK0dGo59b7b0LR6wKfIhttaAhHUyn7
github.com/go-quicktest/qt v1.101.0/go.mod h1:14Bz/f7NwaXPtdYEgzsx46kqSxVwTbzVZsDC26tQJow=
github.com/go-redis/cache/v9 v9.0.0 h1:0thdtFo0xJi0/WXbRVu8B066z8OvVymXTJGaXrVWnN0=
github.com/go-redis/cache/v9 v9.0.0/go.mod h1:cMwi1N8ASBOufbIvk7cdXe2PbPjK/WMRL95FFHWsSgI=
-github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
-github.com/go-sql-driver/mysql v1.8.1 h1:LedoTUt/eveggdHS9qUFC1EFSa8bU2+1pZjSRpvNJ1Y=
-github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg=
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE=
github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y=
github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8=
-github.com/gofrs/uuid v3.2.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM=
github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q=
github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q=
github.com/golang-jwt/jwt/v5 v5.2.1 h1:OuVbFODueb089Lh128TAcimifWaLhJwVflnrgM17wHk=
@@ -373,8 +341,6 @@ github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeN
github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
-github.com/google/go-containerregistry v0.20.2 h1:B1wPJ1SN/S7pB+ZAimcciVD+r+yV/l/DSArMxlbwseo=
-github.com/google/go-containerregistry v0.20.2/go.mod h1:z38EKdKh4h7IP2gSfUUqEvalZBqs6AoLeWfUy34nQC8=
github.com/google/go-github/v66 v66.0.0 h1:ADJsaXj9UotwdgK8/iFZtv7MLc8E8WBl62WLd/D/9+M=
github.com/google/go-github/v66 v66.0.0/go.mod h1:+4SO9Zkuyf8ytMj0csN1NR/5OTR+MfqPp8P8dVlcvY4=
github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8=
@@ -389,7 +355,6 @@ github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/
github.com/google/martian/v3 v3.3.3 h1:DIhPTQrbPkgs2yJYdXU/eNACCG5DVQjySNRNlflZ9Fc=
github.com/google/martian/v3 v3.3.3/go.mod h1:iEPrYcgCF7jA9OtScMFQyAlZZ4YXTKEtJ1E6RWzmBA0=
github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
-github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
github.com/google/s2a-go v0.1.8 h1:zZDs9gcbt9ZPLV0ndSyQk6Kacx2g/X+SKYovpnz3SMM=
github.com/google/s2a-go v0.1.8/go.mod h1:6iNWHTpQ+nfNRN5E00MSdfDwVesa8hhS32PhPO8deJA=
github.com/google/subcommands v1.2.0/go.mod h1:ZjhPrFU+Olkh9WazFPsl27BQ4UPiG37m3yTrtFlrHVk=
@@ -449,65 +414,12 @@ github.com/iancoleman/strcase v0.3.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47
github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
-github.com/jackc/chunkreader v1.0.0/go.mod h1:RT6O25fNZIuasFJRyZ4R/Y2BbhasbmZXF9QQ7T3kePo=
-github.com/jackc/chunkreader/v2 v2.0.0/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk=
-github.com/jackc/chunkreader/v2 v2.0.1/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk=
-github.com/jackc/pgconn v0.0.0-20190420214824-7e0022ef6ba3/go.mod h1:jkELnwuX+w9qN5YIfX0fl88Ehu4XC3keFuOJJk9pcnA=
-github.com/jackc/pgconn v0.0.0-20190824142844-760dd75542eb/go.mod h1:lLjNuW/+OfW9/pnVKPazfWOgNfH2aPem8YQ7ilXGvJE=
-github.com/jackc/pgconn v0.0.0-20190831204454-2fabfa3c18b7/go.mod h1:ZJKsE/KZfsUgOEh9hBm+xYTstcNHg7UPMVJqRfQxq4s=
-github.com/jackc/pgconn v1.4.0/go.mod h1:Y2O3ZDF0q4mMacyWV3AstPJpeHXWGEetiFttmq5lahk=
-github.com/jackc/pgconn v1.5.0/go.mod h1:QeD3lBfpTFe8WUnPZWN5KY/mB8FGMIYRdd8P8Jr0fAI=
-github.com/jackc/pgconn v1.5.1-0.20200601181101-fa742c524853/go.mod h1:QeD3lBfpTFe8WUnPZWN5KY/mB8FGMIYRdd8P8Jr0fAI=
-github.com/jackc/pgconn v1.8.0/go.mod h1:1C2Pb36bGIP9QHGBYCjnyhqu7Rv3sGshaQUvmfGIB/o=
-github.com/jackc/pgerrcode v0.0.0-20220416144525-469b46aa5efa h1:s+4MhCQ6YrzisK6hFJUX53drDT4UsSW3DEhKn0ifuHw=
-github.com/jackc/pgerrcode v0.0.0-20220416144525-469b46aa5efa/go.mod h1:a/s9Lp5W7n/DD0VrVoyJ00FbP2ytTPDVOivvn2bMlds=
-github.com/jackc/pgio v1.0.0/go.mod h1:oP+2QK2wFfUWgr+gxjoBH9KGBb31Eio69xUb0w5bYf8=
-github.com/jackc/pgmock v0.0.0-20190831213851-13a1b77aafa2/go.mod h1:fGZlG77KXmcq05nJLRkk0+p82V8B8Dw8KN2/V9c/OAE=
-github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM=
-github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg=
-github.com/jackc/pgproto3 v1.1.0/go.mod h1:eR5FA3leWg7p9aeAqi37XOTgTIbkABlvcPB3E5rlc78=
-github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190420180111-c116219b62db/go.mod h1:bhq50y+xrl9n5mRYyCBFKkpRVTLYJVWeCc+mEAI3yXA=
-github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190609003834-432c2951c711/go.mod h1:uH0AWtUmuShn0bcesswc4aBTWGvw0cAxIJp+6OB//Wg=
-github.com/jackc/pgproto3/v2 v2.0.0-rc3/go.mod h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvWKnT95C46ckYeM=
-github.com/jackc/pgproto3/v2 v2.0.0-rc3.0.20190831210041-4c03ce451f29/go.mod h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvWKnT95C46ckYeM=
-github.com/jackc/pgproto3/v2 v2.0.1/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA=
-github.com/jackc/pgproto3/v2 v2.0.6/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA=
-github.com/jackc/pgservicefile v0.0.0-20200307190119-3430c5407db8/go.mod h1:vsD4gTJCa9TptPL8sPkXrLZ+hDuNrZCnj29CQpr4X1E=
-github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b/go.mod h1:vsD4gTJCa9TptPL8sPkXrLZ+hDuNrZCnj29CQpr4X1E=
-github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 h1:iCEnooe7UlwOQYpKFhBabPMi4aNAfoODPEFNiAnClxo=
-github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM=
-github.com/jackc/pgtype v0.0.0-20190421001408-4ed0de4755e0/go.mod h1:hdSHsc1V01CGwFsrv11mJRHWJ6aifDLfdV3aVjFF0zg=
-github.com/jackc/pgtype v0.0.0-20190824184912-ab885b375b90/go.mod h1:KcahbBH1nCMSo2DXpzsoWOAfFkdEtEJpPbVLq8eE+mc=
-github.com/jackc/pgtype v0.0.0-20190828014616-a8802b16cc59/go.mod h1:MWlu30kVJrUS8lot6TQqcg7mtthZ9T0EoIBFiJcmcyw=
-github.com/jackc/pgtype v1.2.0/go.mod h1:5m2OfMh1wTK7x+Fk952IDmI4nw3nPrvtQdM0ZT4WpC0=
-github.com/jackc/pgtype v1.3.1-0.20200510190516-8cd94a14c75a/go.mod h1:vaogEUkALtxZMCH411K+tKzNpwzCKU+AnPzBKZ+I+Po=
-github.com/jackc/pgtype v1.3.1-0.20200606141011-f6355165a91c/go.mod h1:cvk9Bgu/VzJ9/lxTO5R5sf80p0DiucVtN7ZxvaC4GmQ=
-github.com/jackc/pgtype v1.6.2/go.mod h1:JCULISAZBFGrHaOXIIFiyfzW5VY0GRitRr8NeJsrdig=
-github.com/jackc/pgx/v4 v4.0.0-20190420224344-cc3461e65d96/go.mod h1:mdxmSJJuR08CZQyj1PVQBHy9XOp5p8/SHH6a0psbY9Y=
-github.com/jackc/pgx/v4 v4.0.0-20190421002000-1b8f0016e912/go.mod h1:no/Y67Jkk/9WuGR0JG/JseM9irFbnEPbuWV2EELPNuM=
-github.com/jackc/pgx/v4 v4.0.0-pre1.0.20190824185557-6972a5742186/go.mod h1:X+GQnOEnf1dqHGpw7JmHqHc1NxDoalibchSk9/RWuDc=
-github.com/jackc/pgx/v4 v4.5.0/go.mod h1:EpAKPLdnTorwmPUUsqrPxy5fphV18j9q3wrfRXgo+kA=
-github.com/jackc/pgx/v4 v4.6.1-0.20200510190926-94ba730bb1e9/go.mod h1:t3/cdRQl6fOLDxqtlyhe9UWgfIi9R8+8v8GKV5TRA/o=
-github.com/jackc/pgx/v4 v4.6.1-0.20200606145419-4e5062306904/go.mod h1:ZDaNWkt9sW1JMiNn0kdYBaLelIhw7Pg4qd+Vk6tw7Hg=
-github.com/jackc/pgx/v4 v4.10.1/go.mod h1:QlrWebbs3kqEZPHCTGyxecvzG6tvIsYu+A5b1raylkA=
-github.com/jackc/pgx/v5 v5.7.2 h1:mLoDLV6sonKlvjIEsV56SkWNCnuNv531l94GaIzO+XI=
-github.com/jackc/pgx/v5 v5.7.2/go.mod h1:ncY89UGWxg82EykZUwSpUKEfccBGGYq1xjrOpsbsfGQ=
-github.com/jackc/puddle v0.0.0-20190413234325-e4ced69a3a2b/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=
-github.com/jackc/puddle v0.0.0-20190608224051-11cab39313c9/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=
-github.com/jackc/puddle v1.1.0/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=
-github.com/jackc/puddle v1.1.1/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=
-github.com/jackc/puddle v1.1.3/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=
-github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo=
-github.com/jackc/puddle/v2 v2.2.2/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4=
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A=
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo=
-github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc=
-github.com/jinzhu/now v1.1.1/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg=
github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo=
github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8=
github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U=
-github.com/jmoiron/sqlx v1.3.1/go.mod h1:2BljVx/86SuTyjE+aPYlHCTNvZrnJXghYGpNiXLBMCQ=
github.com/jpillora/backoff v1.0.0 h1:uvFg412JmmHBHw7iwprIxkPMI+sGQ4kzOWsMeHnm2EA=
github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4=
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
@@ -524,27 +436,16 @@ github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47e
github.com/klauspost/compress v1.17.11 h1:In6xLpyWOi1+C7tXUUWv2ot1QvBjxevKAaI6IXrJmUc=
github.com/klauspost/compress v1.17.11/go.mod h1:pMDklpSncoRMuLFrf1W9Ss9KT+0rH90U12bZKk7uwG0=
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
-github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk=
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
-github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc=
github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
-github.com/lann/builder v0.0.0-20180802200727-47ae307949d0 h1:SOEGU9fKiNWd/HOJuq6+3iTQz8KNCLtVX6idSoTLdUw=
-github.com/lann/builder v0.0.0-20180802200727-47ae307949d0/go.mod h1:dXGbAdH5GtBTC4WfIxhKZfyBF/HBFgRZSWwZ9g/He9o=
-github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0 h1:P6pPBnrTSX3DEVR4fDembhRWSsG5rVo6hYhAB/ADZrk=
-github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0/go.mod h1:vmVJ0l/dxyfGW6FmdpVm2joNMFikkuWg0EoCKLGUMNw=
-github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
-github.com/lib/pq v1.1.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
-github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
-github.com/lib/pq v1.3.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
-github.com/lib/pq v1.10.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw=
github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
github.com/lufia/plan9stats v0.0.0-20240513124658-fba389f38bae h1:dIZY4ULFcto4tAFlj1FYZl8ztUZ13bdq+PLY+NOfbyI=
@@ -553,32 +454,22 @@ github.com/magefile/mage v1.15.0 h1:BvGheCMAsG3bWUDbZ8AyXXpCNwU9u5CB6sM+HNb9HYg=
github.com/magefile/mage v1.15.0/go.mod h1:z5UZb/iS3GoOSn0JgWuiw7dxlurVYTu+/jHXqQg881A=
github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY=
github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0=
-github.com/mattn/go-colorable v0.1.1/go.mod h1:FuOcm+DKB9mbwrcAfNl7/TZVBZ6rcnceauSikq3lYCQ=
github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE=
-github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4=
github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
-github.com/mattn/go-isatty v0.0.5/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
-github.com/mattn/go-isatty v0.0.7/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
-github.com/mattn/go-isatty v0.0.9/go.mod h1:YNRxwqDuOph6SZLI9vUUz6OYw3QyUt7WiY2yME+cCiQ=
github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94=
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
-github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU=
-github.com/mattn/go-sqlite3 v1.14.24 h1:tpSp2G2KyMnnQu99ngJ47EIkWVmliIizyZBfPrBWDRM=
-github.com/mattn/go-sqlite3 v1.14.24/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE=
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d h1:5PJl274Y63IEHC+7izoQE9x6ikvDFZS2mDVS3drnohI=
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE=
github.com/miekg/dns v1.1.57 h1:Jzi7ApEIzwEPLHWRcafCN9LZSBbqQpxjt/wpgvg7wcM=
github.com/miekg/dns v1.1.57/go.mod h1:uqRjCRUuEAA6qsOiJvDd+CFo/vW+y5WR6SNmHE55hZk=
-github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=
-github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
github.com/mitchellh/go-wordwrap v1.0.1 h1:TLuKupo69TCn6TQSyGxwI1EblZZEsQ0vMlAFQflz0v0=
github.com/mitchellh/go-wordwrap v1.0.1/go.mod h1:R62XHJLzvMFRBbcrT7m7WgmE1eOyTSsCt+hzestvNj0=
github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
@@ -690,20 +581,15 @@ github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475/go.mod h1:bCqn
github.com/redis/go-redis/v9 v9.0.0-rc.4/go.mod h1:Vo3EsyWnicKnSKCA7HhgnvnyA74wOA69Cd2Meli5mmA=
github.com/redis/go-redis/v9 v9.7.0 h1:HhLSs+B6O021gwzl+locl0zEDnyNkxMtf/Z3NNBMa9E=
github.com/redis/go-redis/v9 v9.7.0/go.mod h1:f6zhXITC7JUJIlPEiBOTXxJgPLdZcA93GewI7inzyWw=
-github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII=
github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o=
-github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ=
-github.com/rs/zerolog v1.13.0/go.mod h1:YbFCdg8HfsridGWAh22vktObvhZbQsZXe4/zB0OKkWU=
-github.com/rs/zerolog v1.15.0/go.mod h1:xYTKnLHcpfU2225ny5qZjxnj9NvkumZYjJHlAThCjNc=
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/sagikazarmark/locafero v0.4.0 h1:HApY1R9zGo4DBgr7dqsTH/JJxLTTsOt7u6keLGt6kNQ=
github.com/sagikazarmark/locafero v0.4.0/go.mod h1:Pe1W6UlPYUk/+wc/6KFhbORCfqzgYEpgQ3O5fPuL3H4=
github.com/sagikazarmark/slog-shim v0.1.0 h1:diDBnUNK9N/354PgrxMywXnAwEr1QZcOr6gto+ugjYE=
github.com/sagikazarmark/slog-shim v0.1.0/go.mod h1:SrcSrq8aKtyuqEI1uvTDTK1arOWRIczQRv+GVI1AkeQ=
-github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0=
github.com/segmentio/asm v1.2.0 h1:9BQrFxC+YOHJlTlHGkTrFWf59nbL3XnCoFLTwDCI7ys=
github.com/segmentio/asm v1.2.0/go.mod h1:BqMnlJP91P8d+4ibuonYZw9mfnzI9HfxselHZr5aAcs=
github.com/segmentio/backo-go v1.0.0 h1:kbOAtGJY2DqOR0jfRkYEorx/b18RgtepGtY3+Cpe6qA=
@@ -716,12 +602,8 @@ github.com/shoenig/go-m1cpu v0.1.6 h1:nxdKQNcEB6vzgA2E2bvzKIYRuNj7XNJ4S/aRSwKzFt
github.com/shoenig/go-m1cpu v0.1.6/go.mod h1:1JJMcUBvfNwpq05QDQVAnx3gUHr9IYF7GNg9SUEw2VQ=
github.com/shoenig/test v0.6.4 h1:kVTaSd7WLz5WZ2IaoM0RSzRsUD+m8wRR+5qvntpn4LU=
github.com/shoenig/test v0.6.4/go.mod h1:byHiCGXqrVaflBLAMq/srcZIHynQPQgeyvkvXnjqq0k=
-github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24/go.mod h1:M+9NzErvs504Cn4c5DxATwIqPbtswREoFCre64PpcG4=
-github.com/shopspring/decimal v0.0.0-20200227202807-02e2044944cc/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o=
-github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o=
github.com/shopspring/decimal v1.4.0 h1:bxl37RwXBklmTi0C79JfXCEBD1cqqHt0bbgBAGFp81k=
github.com/shopspring/decimal v1.4.0/go.mod h1:gawqmDU56v4yIKSwfBSFip1HdCCXN8/+DMd9qYNcwME=
-github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q=
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0=
github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
@@ -742,7 +624,6 @@ github.com/spf13/viper v1.19.0 h1:RWq5SEjt8o25SROyN3z2OrDB9l7RPd3lwTWU8EcEdcI=
github.com/spf13/viper v1.19.0/go.mod h1:GQUN9bilAbhU/jgc1bKs99f/suXKeUMct8Adx5+Ntkg=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
-github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY=
@@ -770,8 +651,6 @@ github.com/tklauser/go-sysconf v0.3.14 h1:g5vzr9iPFFz24v2KZXs/pvpvh8/V9Fw6vQK5ZZ
github.com/tklauser/go-sysconf v0.3.14/go.mod h1:1ym4lWMLUOhuBOPGtRcJm7tEGX4SCYNEEEtghGG/8uY=
github.com/tklauser/numcpus v0.8.0 h1:Mx4Wwe/FjZLeQsK/6kt2EOepwwSl7SmJrK5bV/dXYgY=
github.com/tklauser/numcpus v0.8.0/go.mod h1:ZJZlAY+dmR4eut8epnzf0u/VwodKmryxR8txiloSqBE=
-github.com/tursodatabase/libsql-client-go v0.0.0-20240902231107-85af5b9d094d h1:dOMI4+zEbDI37KGb0TI44GUAwxHF9cMsIoDTJ7UmgfU=
-github.com/tursodatabase/libsql-client-go v0.0.0-20240902231107-85af5b9d094d/go.mod h1:l8xTsYB90uaVdMHXMCxKKLSgw5wLYBwBKKefNIUnm9s=
github.com/twmb/franz-go v1.18.0 h1:25FjMZfdozBywVX+5xrWC2W+W76i0xykKjTdEeD2ejw=
github.com/twmb/franz-go v1.18.0/go.mod h1:zXCGy74M0p5FbXsLeASdyvfLFsBvTubVqctIaa5wQ+I=
github.com/twmb/franz-go/pkg/kadm v1.14.0 h1:nAn1co1lXzJQocpzyIyOFOjUBf4WHWs5/fTprXy2IZs=
@@ -782,8 +661,6 @@ github.com/twmb/franz-go/pkg/sr v1.2.0 h1:zYr0Ly7KLFfeCGaSr8teN6LvAVeYVrZoUsyyPH
github.com/twmb/franz-go/pkg/sr v1.2.0/go.mod h1:gpd2Xl5/prkj3gyugcL+rVzagjaxFqMgvKMYcUlrpDw=
github.com/twmb/franz-go/plugin/kzap v1.1.2 h1:0arX5xJ0soUPX1LlDay6ZZoxuWkWk1lggQ5M/IgRXAE=
github.com/twmb/franz-go/plugin/kzap v1.1.2/go.mod h1:53Cl9Uz1pbdOPDvUISIxLrZIWSa2jCuY1bTMauRMBmo=
-github.com/vbatts/tar-split v0.11.6 h1:4SjTW5+PU11n6fZenf2IPoV8/tz3AaYHMWjf23envGs=
-github.com/vbatts/tar-split v0.11.6/go.mod h1:dqKNtesIOr2j2Qv3W/cHjnvk9I8+G7oAkFDFN6TCBEI=
github.com/vmihailenco/go-tinylfu v0.2.2 h1:H1eiG6HM36iniK6+21n9LLpzx1G9R3DJa2UjUjbynsI=
github.com/vmihailenco/go-tinylfu v0.2.2/go.mod h1:CutYi2Q9puTxfcolkliPq4npPuofg9N9t8JVrjzwa3Q=
github.com/vmihailenco/msgpack/v5 v5.3.4/go.mod h1:7xyJ9e+0+9SaZT0Wt1RGleJXzli6Q/V5KbhBonMG9jc=
@@ -803,8 +680,6 @@ github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHo
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ=
github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74=
github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y=
-github.com/xo/dburl v0.23.2 h1:Fl88cvayrgE56JA/sqhNMLljCW/b7RmG1mMkKMZUFgA=
-github.com/xo/dburl v0.23.2/go.mod h1:uazlaAQxj4gkshhfuuYyvwCBouOmNnG2aDxTCFZpmL4=
github.com/xtgo/uuid v0.0.0-20140804021211-a0b114877d4c h1:3lbZUMbMiGUW/LMkfsEABsc5zNT9+b1CvsJx47JzJ8g=
github.com/xtgo/uuid v0.0.0-20140804021211-a0b114877d4c/go.mod h1:UrdRz5enIKZ63MEE3IF9l2/ebyx59GyGgPi+tICQdmM=
github.com/yashtewari/glob-intersection v0.2.0 h1:8iuHdN88yYuCzCdjt0gDe+6bAhUwBeEWqThExu54RFg=
@@ -816,7 +691,6 @@ github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
github.com/yusufpapurcu/wmi v1.2.4 h1:zFUKzehAFReQwLys1b/iSMl+JQGSCSjtVqQn9bBrPo0=
github.com/yusufpapurcu/wmi v1.2.4/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0=
-github.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q=
go.mongodb.org/mongo-driver v1.11.4/go.mod h1:PTSz5yu21bkT/wXpkS7WR5f0ddqw5quethTUn9WM+2g=
go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0=
go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo=
@@ -870,37 +744,23 @@ go.opentelemetry.io/otel/trace v1.34.0 h1:+ouXS2V8Rd4hp4580a8q23bg0azF2nI8cqLYnC
go.opentelemetry.io/otel/trace v1.34.0/go.mod h1:Svm7lSjQD7kG7KJ/MUHPVXSDGz2OX4h0M2jHBhmSfRE=
go.opentelemetry.io/proto/otlp v1.4.0 h1:TA9WRvW6zMwP+Ssb6fLoUIuirti1gGbP28GcKG1jgeg=
go.opentelemetry.io/proto/otlp v1.4.0/go.mod h1:PPBWZIP98o2ElSqI35IHfu7hIhSwvc5N38Jw8pXuGFY=
-go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
-go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
-go.uber.org/atomic v1.6.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ=
go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=
go.uber.org/atomic v1.11.0 h1:ZvwS0R+56ePWxUNi+Atn9dWONBPp/AUETXlHW0DxSjE=
go.uber.org/atomic v1.11.0/go.mod h1:LUxbIzbOniOlMKjJjyPfpl4v+PKK2cNJn91OQbhoJI0=
go.uber.org/goleak v1.1.10/go.mod h1:8a7PlsEVH3e/a/GLqe5IIrQx6GzcnRmZEufDUTk4A7A=
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
-go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0=
-go.uber.org/multierr v1.5.0/go.mod h1:FeouvMocqHpRaaGuG9EjoKcStLC43Zu/fmqdUMPcKYU=
go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU=
go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0=
go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y=
-go.uber.org/tools v0.0.0-20190618225709-2cfd321de3ee/go.mod h1:vJERXedbb3MVM5f9Ejo0C68/HhF8uaILCdgjnY+goOA=
-go.uber.org/zap v1.9.1/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q=
-go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q=
go.uber.org/zap v1.18.1/go.mod h1:xg/QME4nWcxGxrpdeYfq7UvYrLh66cuVKdrbD1XF/NI=
go.uber.org/zap v1.27.0 h1:aJMhYGrd5QSmlpLMr2MftRKl7t8J8PTZPA732ud/XR8=
go.uber.org/zap v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E=
gocloud.dev v0.40.0 h1:f8LgP+4WDqOG/RXoUcyLpeIAGOcAbZrZbDQCUee10ng=
gocloud.dev v0.40.0/go.mod h1:drz+VyYNBvrMTW0KZiBAYEdl8lbNZx+OQ7oQvdrFmSQ=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
-golang.org/x/crypto v0.0.0-20190411191339-88737f569e3a/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE=
-golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
-golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
-golang.org/x/crypto v0.0.0-20190911031432-227b76d455e7/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
-golang.org/x/crypto v0.0.0-20200323165209-0ec3e9974c59/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
-golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.1.0/go.mod h1:RecgLatLF4+eUMCP1PoPZQb+cVrJcOPbHkTkbkB9sbw=
@@ -916,7 +776,6 @@ golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTk
golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
-golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc=
golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3/go.mod h1:3p9vT2HGsQu2K1YbXdKPJLVgG5VJdoTa1poYQBtP1AY=
@@ -935,7 +794,6 @@ golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73r
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
-golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
@@ -973,15 +831,11 @@ golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.10.0 h1:3NQrjDixjgGwUOCaF8w2+VYHv0Ve/vGYSbdkTa98gmQ=
golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/sys v0.0.0-20190403152447-81d4e9dc473e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@@ -1037,9 +891,7 @@ golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
golang.org/x/term v0.28.0 h1:/Ts8HFuMR2E6IP/jlo7QVLZHggjKQbhu/7H0LJFr3Gg=
golang.org/x/term v0.28.0/go.mod h1:Sw/lC2IAUZ92udQNf3WodGtn4k/XoLyZoh8v/8uiwek=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
-golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
-golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
@@ -1057,12 +909,7 @@ golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGm
golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
-golang.org/x/tools v0.0.0-20190425163242-31fd60d6bfdc/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
-golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
-golang.org/x/tools v0.0.0-20190823170909-c4a336ef6a2f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
-golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
-golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191108193012-7d206e10da11/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
@@ -1077,8 +924,6 @@ golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58
golang.org/x/tools v0.17.0/go.mod h1:xsh6VxdV005rRVaS6SSAf9oiAqljS7UZUacMZ8Bnsps=
golang.org/x/tools v0.29.0 h1:Xx0h3TtM9rzQpQuR4dKLrdglAmCEN5Oi+P74JdhdzXE=
golang.org/x/tools v0.29.0/go.mod h1:KMQVMRsVxU6nHCFXrBPhDB8XncLNLM0lIy/F14RP588=
-golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
-golang.org/x/xerrors v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
@@ -1129,7 +974,6 @@ gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntN
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
-gopkg.in/inconshreveable/log15.v2 v2.0.0-20180818164646-67afb5ed74ec/go.mod h1:aPpfJ7XW+gOuirDoZ8gHhLh3kZ1B08FtV2bbmy7Jv3s=
gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA=
gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
gopkg.in/segmentio/analytics-go.v3 v3.1.0 h1:UzxH1uaGZRpMKDhJyBz0pexz6yUoBU3x8bJsRk/HV6U=
@@ -1148,14 +992,10 @@ gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
-gorm.io/driver/postgres v1.0.8/go.mod h1:4eOzrI1MUfm6ObJU/UcmbXyiHSs8jSwH95G5P5dxcAg=
-gorm.io/gorm v1.20.12/go.mod h1:0HFTzE/SqkGTzK6TlDPPQbAYCluiVvhzoA1+aVyzenw=
-gorm.io/gorm v1.21.4/go.mod h1:0HFTzE/SqkGTzK6TlDPPQbAYCluiVvhzoA1+aVyzenw=
gotest.tools/v3 v3.5.1 h1:EENdUnS3pdur5nybKYIh2Vfgc8IUNBjxDPSjtiJcOzU=
gotest.tools/v3 v3.5.1/go.mod h1:isy3WKz7GK6uNw/sbHzfKBLvlvXwUyV06n6brMxxopU=
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
-honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=
oras.land/oras-go/v2 v2.5.0 h1:o8Me9kLY74Vp5uw07QXPiitjsw7qNXi8Twd+19Zf02c=
oras.land/oras-go/v2 v2.5.0/go.mod h1:z4eisnLP530vwIOUOJeBIj0aGI0L1C3d53atvCBqZHg=
sigs.k8s.io/yaml v1.4.0 h1:Mk1wCc2gy/F0THH0TAp1QYyJNzRm2KCLy3o5ASXVI5E=
diff --git a/go.work.sum b/go.work.sum
index 51e2601a97..f1b806d582 100644
--- a/go.work.sum
+++ b/go.work.sum
@@ -566,6 +566,8 @@ cuelang.org/go v0.10.0/go.mod h1:HzlaqqqInHNiqE6slTP6+UtxT9hN6DAzgJgdbNxXvX8=
cuelang.org/go v0.10.1/go.mod h1:HzlaqqqInHNiqE6slTP6+UtxT9hN6DAzgJgdbNxXvX8=
cuelang.org/go v0.11.0/go.mod h1:PBY6XvPUswPPJ2inpvUozP9mebDVTXaeehQikhZPBz0=
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
+filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA=
+filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4=
github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4/go.mod h1:hN7oaIRCjzsZ2dE+yG5k+rsdt3qcwykqK6HVGcKwsw4=
github.com/99designs/keyring v1.2.1/go.mod h1:fc+wB5KTk9wQ9sDx0kFXB3A0MaeGHM9AwRStKOQ5vOA=
github.com/AdaLogics/go-fuzz-headers v0.0.0-20210715213245-6c3934b029d8/go.mod h1:CzsSbkDixRphAF5hS6wbMKq0eI6ccJRb7/A0M6JBnwg=
@@ -813,6 +815,8 @@ github.com/cncf/xds/go v0.0.0-20240318125728-8a4994d93e50/go.mod h1:5e1+Vvlzido6
github.com/cncf/xds/go v0.0.0-20240423153145-555b57ec207b/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8=
github.com/cncf/xds/go v0.0.0-20240723142845-024c85f92f20/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8=
github.com/cockroachdb/apd v1.1.0 h1:3LFP3629v+1aKXU5Q37mxmRxX/pIu1nijXydLShEq5I=
+github.com/cockroachdb/cockroach-go/v2 v2.1.1 h1:3XzfSMuUT0wBe1a3o5C0eOTcArhmmFAg2Jzh/7hhKqo=
+github.com/cockroachdb/cockroach-go/v2 v2.1.1/go.mod h1:7NtUnP6eK+l6k483WSYNrq3Kb23bWV10IRV1TyeSpwM=
github.com/cockroachdb/datadriven v0.0.0-20190809214429-80d97fb3cbaa/go.mod h1:zn76sxSg3SzpJ0PPJaLDCu+Bu0Lg3sKTORVIj19EIF8=
github.com/cockroachdb/datadriven v0.0.0-20200714090401-bf6692d28da5/go.mod h1:h6jFvWxBdQXxjopDMZyH2UVceIRfR84bdzbkoKrsWNo=
github.com/cockroachdb/errors v1.2.4/go.mod h1:rQD95gz6FARkaKkQXUksEje/d9a6wBJoCr5oaCLELYA=
@@ -1142,6 +1146,8 @@ github.com/go-openapi/swag v0.22.3/go.mod h1:UzaqsxGiab7freDnrUUra0MwWfN/q7tE4j+
github.com/go-redis/redis v6.15.8+incompatible/go.mod h1:NAIEuMOZ/fxfXJIrKDQDz8wamY7mA7PouImQ2Jvg6kA=
github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
github.com/go-sql-driver/mysql v1.7.1/go.mod h1:OXbVy3sEdcQ2Doequ6Z5BW6fXNQTmx+9S1MCJN5yJMI=
+github.com/go-sql-driver/mysql v1.8.1 h1:LedoTUt/eveggdHS9qUFC1EFSa8bU2+1pZjSRpvNJ1Y=
+github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg=
github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572/go.mod h1:9Pwr4B2jHnOSGXyyzV8ROjYa2ojvAY6HCGYYfMoC3Ls=
github.com/go-toolsmith/astcopy v1.0.2/go.mod h1:4TcEdbElGc9twQEYpVo/aieIXfHhiuLh4aLAck6dO7Y=
github.com/gobuffalo/here v0.6.0/go.mod h1:wAG085dHOYqUpf+Ap+WOdrPTp5IYcDAs/x7PLa8Y5fM=
@@ -1368,13 +1374,21 @@ github.com/intel/goresctrl v0.3.0/go.mod h1:fdz3mD85cmP9sHD8JUlrNWAxvwM86CrbmVXl
github.com/invopop/jsonschema v0.7.0/go.mod h1:O9uiLokuu0+MGFlyiaqtWxwqJm41/+8Nj0lD7A36YH0=
github.com/j-keck/arping v0.0.0-20160618110441-2cf9dc699c56/go.mod h1:ymszkNOg6tORTn+6F6j+Jc8TOr5osrynvN6ivFWZ2GA=
github.com/j-keck/arping v1.0.2/go.mod h1:aJbELhR92bSk7tp79AWM/ftfc90EfEi2bQJrbBFOsPw=
+github.com/jackc/chunkreader/v2 v2.0.1/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk=
github.com/jackc/pgconn v1.14.3/go.mod h1:RZbme4uasqzybK2RK5c65VsHxoyaml09lx3tXOcO/VM=
+github.com/jackc/pgerrcode v0.0.0-20220416144525-469b46aa5efa h1:s+4MhCQ6YrzisK6hFJUX53drDT4UsSW3DEhKn0ifuHw=
+github.com/jackc/pgerrcode v0.0.0-20220416144525-469b46aa5efa/go.mod h1:a/s9Lp5W7n/DD0VrVoyJ00FbP2ytTPDVOivvn2bMlds=
+github.com/jackc/pgio v1.0.0/go.mod h1:oP+2QK2wFfUWgr+gxjoBH9KGBb31Eio69xUb0w5bYf8=
+github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM=
+github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg=
github.com/jackc/pgproto3/v2 v2.3.3/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA=
+github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a h1:bbPeKD0xmW/Y25WS6cokEszi5g+S0QxI/d45PkRi7Nk=
github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM=
github.com/jackc/pgtype v1.14.0/go.mod h1:LUMuVrfsFfdKGLw+AFFVv6KtHOFMwRgDDzBt76IqCA4=
github.com/jackc/pgx/v4 v4.18.2/go.mod h1:Ey4Oru5tH5sB6tV7hDmfWFahwF15Eb7DNXlRKx2CkVw=
github.com/jackc/pgx/v5 v5.4.3/go.mod h1:Ig06C2Vu0t5qXC60W8sqIthScaEnFvojjj9dSljmHRA=
github.com/jackc/puddle v1.1.3 h1:JnPg/5Q9xVJGfjsO5CPUOjnJps1JaRUm8I9FXVCFK94=
+github.com/jackc/puddle/v2 v2.2.1 h1:RhxXJtFG022u4ibrCSMSiu5aOq1i77R3OHKNJj77OAk=
github.com/jackc/puddle/v2 v2.2.1/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4=
github.com/jackpal/gateway v1.0.15/go.mod h1:dbyEDcDhHUh9EmjB9ung81elMUZfG0SoNc2TfTbcj4c=
github.com/jcmturner/aescts/v2 v2.0.0/go.mod h1:AiaICIRyfYg35RUkr8yESTqvSy7csK90qZ5xfvvsoNs=
@@ -1473,6 +1487,8 @@ github.com/mattn/go-runewidth v0.0.15/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh
github.com/mattn/go-shellwords v1.0.3/go.mod h1:3xCvwCdWdlDJUrvuMn7Wuy9eWs4pE8vqg+NOMyg4B2o=
github.com/mattn/go-shellwords v1.0.6/go.mod h1:3xCvwCdWdlDJUrvuMn7Wuy9eWs4pE8vqg+NOMyg4B2o=
github.com/mattn/go-shellwords v1.0.12/go.mod h1:EZzvwXDESEeg03EKmM+RmDnNOPKG4lLtQsUlTZDWQ8Y=
+github.com/mattn/go-sqlite3 v1.14.22 h1:2gZY6PC6kBnID23Tichd1K+Z0oS6nE/XwU+Vz/5o4kU=
+github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
github.com/mattn/goveralls v0.0.12/go.mod h1:44ImGEUfmqH8bBtaMrYKsM65LXfNLWmwaxFGjZwgMSQ=
github.com/matttproud/golang_protobuf_extensions v1.0.2-0.20181231171920-c182affec369/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4=
github.com/matttproud/golang_protobuf_extensions v1.0.4/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4=
diff --git a/internal/cmd/authn.go b/internal/cmd/authn.go
index 119cb4b31e..f03d7ef0c6 100644
--- a/internal/cmd/authn.go
+++ b/internal/cmd/authn.go
@@ -13,7 +13,6 @@ import (
"github.com/grpc-ecosystem/go-grpc-middleware/v2/interceptors/selector"
"github.com/grpc-ecosystem/grpc-gateway/v2/runtime"
"github.com/hashicorp/cap/jwt"
- "go.flipt.io/flipt/internal/cleanup"
"go.flipt.io/flipt/internal/config"
"go.flipt.io/flipt/internal/containers"
"go.flipt.io/flipt/internal/gateway"
@@ -29,8 +28,6 @@ import (
storageauth "go.flipt.io/flipt/internal/storage/authn"
storageauthcache "go.flipt.io/flipt/internal/storage/authn/cache"
storageauthmemory "go.flipt.io/flipt/internal/storage/authn/memory"
- authsql "go.flipt.io/flipt/internal/storage/authn/sql"
- oplocksql "go.flipt.io/flipt/internal/storage/oplock/sql"
rpcauth "go.flipt.io/flipt/rpc/flipt/auth"
"go.uber.org/zap"
"google.golang.org/grpc"
@@ -47,42 +44,6 @@ func getAuthStore(
shutdown = func(context.Context) error { return nil }
)
- if cfg.Authentication.RequiresDatabase() {
- _, builder, driver, dbShutdown, err := getDB(ctx, logger, cfg, forceMigrate)
- if err != nil {
- return nil, nil, err
- }
-
- store = authsql.NewStore(driver, builder, logger)
- shutdown = dbShutdown
-
- if cfg.Authentication.ShouldRunCleanup() {
- var (
- oplock = oplocksql.New(logger, driver, builder)
- cleanup = cleanup.NewAuthenticationService(
- logger,
- oplock,
- store,
- cfg.Authentication,
- )
- )
-
- cleanup.Run(ctx)
-
- dbShutdown := shutdown
- shutdown = func(ctx context.Context) error {
- logger.Info("shutting down authentication cleanup service...")
-
- if err := cleanup.Shutdown(ctx); err != nil {
- _ = dbShutdown(ctx)
- return err
- }
-
- return dbShutdown(ctx)
- }
- }
- }
-
return store, shutdown, nil
}
@@ -91,7 +52,6 @@ func authenticationGRPC(
logger *zap.Logger,
cfg *config.Config,
forceMigrate bool,
- tokenDeletedEnabled bool,
authOpts ...containers.Option[authmiddlewaregrpc.InterceptorOptions],
) (grpcRegisterers, []grpc.UnaryServerInterceptor, func(context.Context) error, error) {
@@ -105,7 +65,7 @@ func authenticationGRPC(
// FS backends are configured.
// All that is required to establish a connection for authentication is to either make auth required
// or configure at-least one authentication method (e.g. enable token method).
- if !authCfg.Enabled() && (cfg.Storage.Type != config.DatabaseStorageType) {
+ if !authCfg.Enabled() {
return grpcRegisterers{
public.NewServer(logger, authCfg),
authn.NewServer(logger, storageauthmemory.NewStore()),
@@ -126,7 +86,7 @@ func authenticationGRPC(
}
var (
- authServer = authn.NewServer(logger, store, authn.WithAuditLoggingEnabled(tokenDeletedEnabled))
+ authServer = authn.NewServer(logger, store)
publicServer = public.NewServer(logger, authCfg)
register = grpcRegisterers{
diff --git a/internal/cmd/grpc.go b/internal/cmd/grpc.go
index f7fe47ee21..1f2c3d1b53 100644
--- a/internal/cmd/grpc.go
+++ b/internal/cmd/grpc.go
@@ -2,7 +2,6 @@ package cmd
import (
"context"
- "database/sql"
"errors"
"fmt"
"net"
@@ -15,7 +14,6 @@ import (
"go.opentelemetry.io/contrib/propagators/autoprop"
- sq "github.com/Masterminds/squirrel"
"go.flipt.io/flipt/internal/cache"
"go.flipt.io/flipt/internal/cache/memory"
"go.flipt.io/flipt/internal/cache/redis"
@@ -44,10 +42,6 @@ import (
"go.flipt.io/flipt/internal/storage"
storagecache "go.flipt.io/flipt/internal/storage/cache"
fsstore "go.flipt.io/flipt/internal/storage/fs/store"
- fliptsql "go.flipt.io/flipt/internal/storage/sql"
- "go.flipt.io/flipt/internal/storage/sql/mysql"
- "go.flipt.io/flipt/internal/storage/sql/postgres"
- "go.flipt.io/flipt/internal/storage/sql/sqlite"
"go.flipt.io/flipt/internal/tracing"
"go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc"
"go.opentelemetry.io/otel"
@@ -129,33 +123,9 @@ func NewGRPCServer(
var store storage.Store
- switch cfg.Storage.Type {
- case "", config.DatabaseStorageType:
- db, builder, driver, dbShutdown, err := getDB(ctx, logger, cfg, forceMigrate)
- if err != nil {
- return nil, err
- }
-
- server.onShutdown(dbShutdown)
-
- switch driver {
- case fliptsql.SQLite, fliptsql.LibSQL:
- store = sqlite.NewStore(db, builder, logger)
- case fliptsql.Postgres, fliptsql.CockroachDB:
- store = postgres.NewStore(db, builder, logger)
- case fliptsql.MySQL:
- store = mysql.NewStore(db, builder, logger)
- default:
- return nil, fmt.Errorf("unsupported driver: %s", driver)
- }
-
- logger.Debug("database driver configured", zap.Stringer("driver", driver))
- default:
- // otherwise, attempt to configure a declarative backend store
- store, err = fsstore.NewStore(ctx, logger, cfg)
- if err != nil {
- return nil, err
- }
+ store, err = fsstore.NewStore(ctx, logger, cfg)
+ if err != nil {
+ return nil, err
}
logger.Debug("store enabled", zap.Stringer("store", store))
@@ -279,29 +249,11 @@ func NewGRPCServer(
skipAuthnIfExcluded(evaldatasrv, cfg.Authentication.Exclude.Evaluation)
skipAuthnIfExcluded(ofrepsrv, cfg.Authentication.Exclude.OFREP)
- var checker audit.EventPairChecker = &audit.NoOpChecker{}
-
- // We have to check if audit logging is enabled here for informing the authentication service that
- // the user would like to receive token:deleted events.
- if cfg.Audit.Enabled() {
- var err error
- checker, err = audit.NewChecker(cfg.Audit.Events)
- if err != nil {
- return nil, err
- }
- }
-
- var tokenDeletedEnabled bool
- if checker != nil {
- tokenDeletedEnabled = checker.Check("token:deleted")
- }
-
register, authInterceptors, authShutdown, err := authenticationGRPC(
ctx,
logger,
cfg,
forceMigrate,
- tokenDeletedEnabled,
authnOpts...,
)
if err != nil {
@@ -420,7 +372,7 @@ func NewGRPCServer(
// based on audit sink configuration from the user, provision the audit sinks and add them to a slice,
// and if the slice has a non-zero length, add the audit sink interceptor
if len(sinks) > 0 {
- interceptors = append(interceptors, middlewaregrpc.AuditEventUnaryInterceptor(logger, checker))
+ interceptors = append(interceptors, middlewaregrpc.AuditEventUnaryInterceptor(logger))
spanExporter := audit.NewSinkSpanExporter(logger, sinks)
@@ -430,7 +382,6 @@ func NewGRPCServer(
zap.Stringers("sinks", sinks),
zap.Int("buffer capacity", cfg.Audit.Buffer.Capacity),
zap.String("flush period", cfg.Audit.Buffer.FlushPeriod.String()),
- zap.Strings("events", checker.Events()),
)
server.onShutdown(func(ctx context.Context) error {
@@ -614,57 +565,6 @@ func getCache(ctx context.Context, cfg *config.Config) (cache.Cacher, errFunc, e
return cacher, cacheFunc, cacheErr
}
-var (
- dbOnce sync.Once
- db *sql.DB
- builder sq.StatementBuilderType
- driver fliptsql.Driver
- dbFunc errFunc = func(context.Context) error { return nil }
- dbErr error
-)
-
-func getDB(ctx context.Context, logger *zap.Logger, cfg *config.Config, forceMigrate bool) (*sql.DB, sq.StatementBuilderType, fliptsql.Driver, errFunc, error) {
- dbOnce.Do(func() {
- migrator, err := fliptsql.NewMigrator(*cfg, logger)
- if err != nil {
- dbErr = err
- return
- }
-
- if err := migrator.Up(forceMigrate); err != nil {
- migrator.Close()
- dbErr = err
- return
- }
-
- migrator.Close()
-
- db, driver, err = fliptsql.Open(*cfg)
- if err != nil {
- dbErr = fmt.Errorf("opening db: %w", err)
- return
- }
-
- logger.Debug("constructing builder", zap.Bool("prepared_statements", cfg.Database.PreparedStatementsEnabled))
-
- builder = fliptsql.BuilderFor(db, driver, cfg.Database.PreparedStatementsEnabled)
-
- dbFunc = func(context.Context) error {
- return db.Close()
- }
-
- if driver == fliptsql.SQLite && cfg.Database.MaxOpenConn > 1 {
- logger.Warn("ignoring config.db.max_open_conn due to driver limitation (sqlite)", zap.Int("attempted_max_conn", cfg.Database.MaxOpenConn))
- }
-
- if err := db.PingContext(ctx); err != nil {
- dbErr = fmt.Errorf("pinging db: %w", err)
- }
- })
-
- return db, builder, driver, dbFunc, dbErr
-}
-
// getStringSlice receives any slice which the underline member type is "string"
// and return a new slice with the same members but transformed to "string" type.
// This is useful when we want to convert an enum slice of strings.
diff --git a/internal/cmd/grpc_test.go b/internal/cmd/grpc_test.go
deleted file mode 100644
index 447485f2a4..0000000000
--- a/internal/cmd/grpc_test.go
+++ /dev/null
@@ -1,29 +0,0 @@
-package cmd
-
-import (
- "context"
- "fmt"
- "path/filepath"
- "testing"
-
- "github.com/stretchr/testify/assert"
- "github.com/stretchr/testify/require"
- "go.flipt.io/flipt/internal/config"
- "go.flipt.io/flipt/internal/info"
- "go.uber.org/zap/zaptest"
-)
-
-func TestNewGRPCServer(t *testing.T) {
- tmp := t.TempDir()
- cfg := &config.Config{}
- cfg.Database.URL = fmt.Sprintf("file:%s", filepath.Join(tmp, "flipt.db"))
- ctx, cancel := context.WithCancel(context.Background())
- t.Cleanup(cancel)
- s, err := NewGRPCServer(ctx, zaptest.NewLogger(t), cfg, info.Flipt{}, false)
- require.NoError(t, err)
- t.Cleanup(func() {
- err := s.Shutdown(ctx)
- assert.NoError(t, err)
- })
- assert.NotEmpty(t, s.Server.GetServiceInfo())
-}
diff --git a/internal/cmd/util/browser.go b/internal/cmd/util/browser.go
deleted file mode 100644
index 1267de8c48..0000000000
--- a/internal/cmd/util/browser.go
+++ /dev/null
@@ -1,27 +0,0 @@
-package util
-
-import (
- "fmt"
- "os/exec"
- "runtime"
-)
-
-// OpenBrowser opens the specified URL in the default browser of the user.
-func OpenBrowser(url string) error {
- var (
- cmd string
- args []string
- )
-
- fmt.Printf("Attempting to open your browser...\nIf this does not work, please navigate to: %q\n", url)
-
- switch runtime.GOOS {
- case "darwin":
- cmd = "open"
- default: // "linux", "freebsd", "openbsd", "netbsd"
- cmd = "xdg-open"
- }
-
- args = append(args, url)
- return exec.Command(cmd, args...).Start()
-}
diff --git a/internal/config/audit.go b/internal/config/audit.go
index 70e8298512..dc47c654fc 100644
--- a/internal/config/audit.go
+++ b/internal/config/audit.go
@@ -17,12 +17,11 @@ var (
type AuditConfig struct {
Sinks SinksConfig `json:"sinks,omitempty" mapstructure:"sinks" yaml:"sinks,omitempty"`
Buffer BufferConfig `json:"buffer,omitempty" mapstructure:"buffer" yaml:"buffer,omitempty"`
- Events []string `json:"events,omitempty" mapstructure:"events" yaml:"events,omitempty"`
}
// Enabled returns true if any nested sink is enabled
func (c AuditConfig) Enabled() bool {
- return c.Sinks.Log.Enabled || c.Sinks.Webhook.Enabled || c.Sinks.Cloud.Enabled || c.Sinks.Kafka.Enabled
+ return c.Sinks.Log.Enabled || c.Sinks.Webhook.Enabled || c.Sinks.Kafka.Enabled
}
func (c AuditConfig) IsZero() bool {
@@ -85,7 +84,6 @@ func (c *AuditConfig) validate() error {
type SinksConfig struct {
Log LogSinkConfig `json:"log,omitempty" mapstructure:"log" yaml:"log,omitempty"`
Webhook WebhookSinkConfig `json:"webhook,omitempty" mapstructure:"webhook" yaml:"webhook,omitempty"`
- Cloud CloudSinkConfig `json:"cloud,omitempty" mapstructure:"cloud" yaml:"cloud,omitempty"`
Kafka KafkaSinkConfig `json:"kafka,omitempty" mapstructure:"kafka" yaml:"kafka,omitempty"`
}
diff --git a/internal/config/audit_test.go b/internal/config/audit_test.go
index 3791807ac0..5466a15f74 100644
--- a/internal/config/audit_test.go
+++ b/internal/config/audit_test.go
@@ -27,11 +27,6 @@ func TestAuditEnabled(t *testing.T) {
f: func() AuditConfig { c := AuditConfig{}; c.Sinks.Webhook.Enabled = true; return c },
expected: true,
},
- {
- sink: "cloud",
- f: func() AuditConfig { c := AuditConfig{}; c.Sinks.Cloud.Enabled = true; return c },
- expected: true,
- },
{
sink: "kafka",
f: func() AuditConfig { c := AuditConfig{}; c.Sinks.Kafka.Enabled = true; return c },
diff --git a/internal/config/authentication.go b/internal/config/authentication.go
index ba2174db21..99ad113d6d 100644
--- a/internal/config/authentication.go
+++ b/internal/config/authentication.go
@@ -202,14 +202,6 @@ func (c *AuthenticationConfig) validate() error {
return nil
}
-func (c *AuthenticationConfig) deprecations(v *viper.Viper) []deprecated {
- if v.Get("authentication.exclude.metadata") != nil {
- return []deprecated{deprecateAuthenticationExcludeMetdata}
- }
-
- return nil
-}
-
func getHostname(rawurl string) (string, error) {
if !strings.Contains(rawurl, "://") {
rawurl = "http://" + rawurl
diff --git a/internal/config/cache.go b/internal/config/cache.go
index f1a615da9c..a778b685f1 100644
--- a/internal/config/cache.go
+++ b/internal/config/cache.go
@@ -1,7 +1,6 @@
package config
import (
- "encoding/json"
"errors"
"time"
@@ -59,40 +58,16 @@ func (c CacheConfig) IsZero() bool {
}
// CacheBackend is either memory or redis
-// TODO: can we use a string here instead?
-type CacheBackend uint8
-
-func (c CacheBackend) String() string {
- return cacheBackendToString[c]
-}
-
-func (c CacheBackend) MarshalJSON() ([]byte, error) {
- return json.Marshal(c.String())
-}
-
-func (c CacheBackend) MarshalYAML() (interface{}, error) {
- return c.String(), nil
-}
+type CacheBackend string
const (
- _ CacheBackend = iota
- // CacheMemory ...
- CacheMemory
- // CacheRedis ...
- CacheRedis
+ CacheMemory CacheBackend = "memory"
+ CacheRedis CacheBackend = "redis"
)
-var (
- cacheBackendToString = map[CacheBackend]string{
- CacheMemory: "memory",
- CacheRedis: "redis",
- }
-
- stringToCacheBackend = map[string]CacheBackend{
- "memory": CacheMemory,
- "redis": CacheRedis,
- }
-)
+func (c CacheBackend) String() string {
+ return string(c)
+}
// MemoryCacheConfig contains fields, which configure in-memory caching.
type MemoryCacheConfig struct {
diff --git a/internal/config/config.go b/internal/config/config.go
index 1a57a4e3e7..1e9f283c2a 100644
--- a/internal/config/config.go
+++ b/internal/config/config.go
@@ -23,7 +23,7 @@ import (
)
const (
- Version = "1.0"
+ Version = "2.0"
EnvPrefix = "FLIPT"
)
@@ -36,10 +36,6 @@ var DecodeHooks = []mapstructure.DecodeHookFunc{
stringToEnvsubstHookFunc(),
mapstructure.StringToTimeDurationHookFunc(),
stringToSliceHookFunc(),
- stringToEnumHookFunc(stringToCacheBackend),
- stringToEnumHookFunc(stringToTracingExporter),
- stringToEnumHookFunc(stringToScheme),
- stringToEnumHookFunc(stringToDatabaseProtocol),
stringToEnumHookFunc(stringToAuthMethod),
}
@@ -62,7 +58,6 @@ type Config struct {
Authorization AuthorizationConfig `json:"authorization,omitempty" mapstructure:"authorization" yaml:"authorization,omitempty"`
Cache CacheConfig `json:"cache,omitempty" mapstructure:"cache" yaml:"cache,omitempty"`
Cors CorsConfig `json:"cors,omitempty" mapstructure:"cors" yaml:"cors,omitempty"`
- Database DatabaseConfig `json:"db,omitempty" mapstructure:"db" yaml:"db,omitempty"`
Diagnostics DiagnosticConfig `json:"diagnostics,omitempty" mapstructure:"diagnostics" yaml:"diagnostics,omitempty"`
Experimental ExperimentalConfig `json:"experimental,omitempty" mapstructure:"experimental" yaml:"experimental,omitempty"`
Log LogConfig `json:"log,omitempty" mapstructure:"log" yaml:"log,omitempty"`
@@ -518,13 +513,6 @@ func stringToSliceHookFunc() mapstructure.DecodeHookFunc {
// Default is the base config used when no configuration is explicit provided.
func Default() *Config {
- dbRoot, err := defaultDatabaseRoot()
- if err != nil {
- panic(err)
- }
-
- dbPath := filepath.ToSlash(filepath.Join(dbRoot, "flipt.db"))
-
return &Config{
Log: LogConfig{
Level: "INFO",
@@ -549,9 +537,6 @@ func Default() *Config {
"Authorization",
"Content-Type",
"X-CSRF-Token",
- "X-Fern-Language",
- "X-Fern-SDK-Name",
- "X-Fern-SDK-Version",
"X-Flipt-Namespace",
"X-Flipt-Accept-Server-Version",
},
@@ -616,14 +601,8 @@ func Default() *Config {
},
},
- Database: DatabaseConfig{
- URL: "file:" + dbPath,
- MaxIdleConn: 2,
- PreparedStatementsEnabled: true,
- },
-
Storage: StorageConfig{
- Type: DatabaseStorageType,
+ Type: LocalStorageType,
},
Meta: MetaConfig{
@@ -653,7 +632,6 @@ func Default() *Config {
Capacity: 2,
FlushPeriod: 2 * time.Minute,
},
- Events: []string{"*:*"},
},
Analytics: AnalyticsConfig{
diff --git a/internal/config/config_test.go b/internal/config/config_test.go
index 62d0b51748..d12a2e07ba 100644
--- a/internal/config/config_test.go
+++ b/internal/config/config_test.go
@@ -19,7 +19,6 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/xeipuuv/gojsonschema"
- "go.flipt.io/flipt/internal/oci"
"gocloud.dev/blob"
"gocloud.dev/blob/memblob"
"gopkg.in/yaml.v2"
@@ -58,9 +57,6 @@ func TestScheme(t *testing.T) {
t.Run(tt.name, func(t *testing.T) {
assert.Equal(t, want, scheme.String())
- json, err := scheme.MarshalJSON()
- require.NoError(t, err)
- assert.JSONEq(t, fmt.Sprintf("%q", want), string(json))
})
}
}
@@ -91,9 +87,6 @@ func TestCacheBackend(t *testing.T) {
t.Run(tt.name, func(t *testing.T) {
assert.Equal(t, want, backend.String())
- json, err := backend.MarshalJSON()
- require.NoError(t, err)
- assert.JSONEq(t, fmt.Sprintf("%q", want), string(json))
})
}
}
@@ -129,57 +122,6 @@ func TestTracingExporter(t *testing.T) {
t.Run(tt.name, func(t *testing.T) {
assert.Equal(t, want, exporter.String())
- json, err := exporter.MarshalJSON()
- require.NoError(t, err)
- assert.JSONEq(t, fmt.Sprintf("%q", want), string(json))
- })
- }
-}
-
-func TestDatabaseProtocol(t *testing.T) {
- tests := []struct {
- name string
- protocol DatabaseProtocol
- want string
- }{
- {
- name: "postgres",
- protocol: DatabasePostgres,
- want: "postgres",
- },
- {
- name: "mysql",
- protocol: DatabaseMySQL,
- want: "mysql",
- },
- {
- name: "sqlite",
- protocol: DatabaseSQLite,
- want: "sqlite",
- },
- {
- name: "cockroachdb",
- protocol: DatabaseCockroachDB,
- want: "cockroachdb",
- },
- {
- name: "libsql",
- protocol: DatabaseLibSQL,
- want: "libsql",
- },
- }
-
- for _, tt := range tests {
- var (
- protocol = tt.protocol
- want = tt.want
- )
-
- t.Run(tt.name, func(t *testing.T) {
- assert.Equal(t, want, protocol.String())
- json, err := protocol.MarshalJSON()
- require.NoError(t, err)
- assert.JSONEq(t, fmt.Sprintf("%q", want), string(json))
})
}
}
@@ -210,9 +152,6 @@ func TestLogEncoding(t *testing.T) {
t.Run(tt.name, func(t *testing.T) {
assert.Equal(t, want, encoding.String())
- json, err := encoding.MarshalJSON()
- require.NoError(t, err)
- assert.JSONEq(t, fmt.Sprintf("%q", want), string(json))
})
}
}
@@ -433,24 +372,6 @@ func TestLoad(t *testing.T) {
return cfg
},
},
- {
- name: "database key/value",
- path: "./testdata/database.yml",
- expected: func() *Config {
- cfg := Default()
- cfg.Database = DatabaseConfig{
- Protocol: DatabaseMySQL,
- Host: "localhost",
- Port: 3306,
- User: "flipt",
- Password: "s3cr3t!",
- Name: "flipt",
- MaxIdleConn: 2,
- PreparedStatementsEnabled: true,
- }
- return cfg
- },
- },
{
name: "server https missing cert file",
path: "./testdata/server/https_missing_cert_file.yml",
@@ -771,7 +692,6 @@ func TestLoad(t *testing.T) {
Capacity: 10,
FlushPeriod: 3 * time.Minute,
},
- Events: []string{"*:*"},
}
cfg.Log = LogConfig{
@@ -848,14 +768,6 @@ func TestLoad(t *testing.T) {
},
}
- cfg.Database = DatabaseConfig{
- URL: "postgres://postgres@localhost:5432/flipt?sslmode=disable",
- MaxIdleConn: 10,
- MaxOpenConn: 50,
- ConnMaxLifetime: 30 * time.Minute,
- PreparedStatementsEnabled: true,
- }
-
cfg.Meta = MetaConfig{
CheckForUpdates: false,
TelemetryEnabled: false,
@@ -1198,107 +1110,6 @@ func TestLoad(t *testing.T) {
return cfg
},
},
- {
- name: "OCI config provided",
- path: "./testdata/storage/oci_provided.yml",
- expected: func() *Config {
- cfg := Default()
- cfg.Storage = StorageConfig{
- Type: OCIStorageType,
- OCI: &StorageOCIConfig{
- Repository: "some.target/repository/abundle:latest",
- BundlesDirectory: "/tmp/bundles",
- Authentication: &OCIAuthentication{
- Type: oci.AuthenticationTypeStatic,
- Username: "foo",
- Password: "bar",
- },
- PollInterval: 5 * time.Minute,
- ManifestVersion: "1.1",
- },
- }
- return cfg
- },
- },
- {
- name: "OCI config provided full",
- path: "./testdata/storage/oci_provided_full.yml",
- expected: func() *Config {
- cfg := Default()
- cfg.Storage = StorageConfig{
- Type: OCIStorageType,
- OCI: &StorageOCIConfig{
- Repository: "some.target/repository/abundle:latest",
- BundlesDirectory: "/tmp/bundles",
- Authentication: &OCIAuthentication{
- Type: oci.AuthenticationTypeStatic,
- Username: "foo",
- Password: "bar",
- },
- PollInterval: 5 * time.Minute,
- ManifestVersion: "1.0",
- },
- }
- return cfg
- },
- },
- {
- name: "OCI config provided AWS ECR",
- path: "./testdata/storage/oci_provided_aws_ecr.yml",
- expected: func() *Config {
- cfg := Default()
- cfg.Storage = StorageConfig{
- Type: OCIStorageType,
- OCI: &StorageOCIConfig{
- Repository: "some.target/repository/abundle:latest",
- BundlesDirectory: "/tmp/bundles",
- Authentication: &OCIAuthentication{
- Type: oci.AuthenticationTypeAWSECR,
- },
- PollInterval: 5 * time.Minute,
- ManifestVersion: "1.1",
- },
- }
- return cfg
- },
- },
- {
- name: "OCI config provided with no authentication",
- path: "./testdata/storage/oci_provided_no_auth.yml",
- expected: func() *Config {
- cfg := Default()
- cfg.Storage = StorageConfig{
- Type: OCIStorageType,
- OCI: &StorageOCIConfig{
- Repository: "some.target/repository/abundle:latest",
- BundlesDirectory: "/tmp/bundles",
- PollInterval: 5 * time.Minute,
- ManifestVersion: "1.1",
- },
- }
- return cfg
- },
- },
- {
- name: "OCI config provided with invalid authentication type",
- path: "./testdata/storage/oci_provided_invalid_auth.yml",
- wantErr: errors.New("oci authentication type is not supported"),
- },
- {
- name: "OCI invalid no repository",
- path: "./testdata/storage/oci_invalid_no_repo.yml",
- wantErr: errors.New("oci storage repository must be specified"),
- },
- {
- name: "OCI invalid unexpected scheme",
- path: "./testdata/storage/oci_invalid_unexpected_scheme.yml",
- wantErr: errors.New("validating OCI configuration: unexpected repository scheme: \"unknown\" should be one of [http|https|flipt]"),
- },
- {
- name: "OCI invalid wrong manifest version",
- path: "./testdata/storage/oci_invalid_manifest_version.yml",
- wantErr: errors.New("wrong manifest version, it should be 1.0 or 1.1"),
- },
{
name: "storage readonly config invalid",
path: "./testdata/storage/invalid_readonly.yml",
@@ -1573,10 +1384,7 @@ func TestMarshalYAML(t *testing.T) {
name: "defaults",
path: "./testdata/marshal/yaml/default.yml",
cfg: func() *Config {
- cfg := Default()
- // override the database URL to a file path for testing
- cfg.Database.URL = "file:/tmp/flipt/flipt.db"
- return cfg
+ return Default()
},
},
}
diff --git a/internal/config/database.go b/internal/config/database.go
deleted file mode 100644
index 57eea6973e..0000000000
--- a/internal/config/database.go
+++ /dev/null
@@ -1,122 +0,0 @@
-package config
-
-import (
- "encoding/json"
- "fmt"
- "path/filepath"
- "time"
-
- "github.com/spf13/viper"
-)
-
-var (
- _ defaulter = (*DatabaseConfig)(nil)
- _ validator = (*DatabaseConfig)(nil)
-)
-
-const (
- // database protocol enum
- _ DatabaseProtocol = iota
- // DatabaseSQLite ...
- DatabaseSQLite
- // DatabasePostgres ...
- DatabasePostgres
- // DatabaseMySQL ...
- DatabaseMySQL
- // DatabaseCockroachDB ...
- DatabaseCockroachDB
- // DatabaseLibSQL ...
- DatabaseLibSQL
-)
-
-// DatabaseConfig contains fields, which configure the various relational database backends.
-//
-// Flipt currently supports SQLite, Postgres and MySQL backends.
-type DatabaseConfig struct {
- URL string `json:"-" mapstructure:"url,omitempty" yaml:"url,omitempty"`
- MaxIdleConn int `json:"maxIdleConn,omitempty" mapstructure:"max_idle_conn" yaml:"max_idle_conn,omitempty"`
- MaxOpenConn int `json:"maxOpenConn,omitempty" mapstructure:"max_open_conn" yaml:"max_open_conn,omitempty"`
- ConnMaxLifetime time.Duration `json:"connMaxLifetime,omitempty" mapstructure:"conn_max_lifetime" yaml:"conn_max_lifetime,omitempty"`
- Name string `json:"name,omitempty" mapstructure:"name,omitempty" yaml:"name,omitempty"`
- User string `json:"-" mapstructure:"user,omitempty" yaml:"user,omitempty"`
- Password string `json:"-" mapstructure:"password,omitempty" yaml:"-"`
- Host string `json:"host,omitempty" mapstructure:"host,omitempty" yaml:"host,omitempty"`
- Port int `json:"port,omitempty" mapstructure:"port,omitempty" yaml:"port,omitempty"`
- Protocol DatabaseProtocol `json:"protocol,omitempty" mapstructure:"protocol,omitempty" yaml:"protocol,omitempty"`
- PreparedStatementsEnabled bool `json:"preparedStatementsEnabled,omitempty" mapstructure:"prepared_statements_enabled" yaml:"prepared_statements_enabled,omitempty"`
-}
-
-func (c *DatabaseConfig) setDefaults(v *viper.Viper) error {
- v.SetDefault("db", map[string]any{
- "max_idle_conn": 2,
- })
-
- // URL default is only set given that none of the alternative
- // database connections parameters are provided
- setDefaultURL := true
- for _, field := range []string{"name", "user", "password", "host", "port", "protocol"} {
- setDefaultURL = setDefaultURL && !v.IsSet("db."+field)
- }
-
- if setDefaultURL {
- dbRoot, err := defaultDatabaseRoot()
- if err != nil {
- return fmt.Errorf("getting default database directory: %w", err)
- }
-
- path := filepath.ToSlash(filepath.Join(dbRoot, "flipt.db"))
- v.SetDefault("db.url", "file:"+path)
- }
-
- v.SetDefault("db.prepared_statements_enabled", true)
- return nil
-}
-
-func (c *DatabaseConfig) validate() (err error) {
- if c.URL == "" {
- if c.Protocol == 0 {
- return errFieldRequired("db.protocol")
- }
-
- if c.Host == "" {
- return errFieldRequired("db.host")
- }
-
- if c.Name == "" {
- return errFieldRequired("db.name")
- }
- }
-
- return
-}
-
-// DatabaseProtocol represents a database protocol
-type DatabaseProtocol uint8
-
-func (d DatabaseProtocol) String() string {
- return databaseProtocolToString[d]
-}
-
-func (d DatabaseProtocol) MarshalJSON() ([]byte, error) {
- return json.Marshal(d.String())
-}
-
-var (
- databaseProtocolToString = map[DatabaseProtocol]string{
- DatabaseSQLite: "sqlite",
- DatabaseLibSQL: "libsql",
- DatabasePostgres: "postgres",
- DatabaseMySQL: "mysql",
- DatabaseCockroachDB: "cockroachdb",
- }
-
- stringToDatabaseProtocol = map[string]DatabaseProtocol{
- "file": DatabaseSQLite,
- "sqlite": DatabaseSQLite,
- "libsql": DatabaseLibSQL,
- "postgres": DatabasePostgres,
- "mysql": DatabaseMySQL,
- "cockroachdb": DatabaseCockroachDB,
- "cockroach": DatabaseCockroachDB,
- }
-)
diff --git a/internal/config/database_default.go b/internal/config/database_default.go
deleted file mode 100644
index 8253584cca..0000000000
--- a/internal/config/database_default.go
+++ /dev/null
@@ -1,8 +0,0 @@
-//go:build !linux
-// +build !linux
-
-package config
-
-func defaultDatabaseRoot() (string, error) {
- return Dir()
-}
diff --git a/internal/config/database_linux.go b/internal/config/database_linux.go
deleted file mode 100644
index e5ebd8604c..0000000000
--- a/internal/config/database_linux.go
+++ /dev/null
@@ -1,24 +0,0 @@
-//go:build linux
-// +build linux
-
-package config
-
-import (
- "os"
-
- "github.com/spf13/afero"
-)
-
-func defaultDatabaseRoot() (string, error) {
- return findDatabaseRoot(afero.NewOsFs())
-}
-
-func findDatabaseRoot(fs afero.Fs) (string, error) {
- preferred := "/var/opt/flipt"
- if _, err := fs.Stat(preferred); os.IsNotExist(err) {
- // if /var/opt/flipt doesn't exist fallback to ~/.config/flipt.
- // It's the case when flipt runs locally in linux for testing with sqlite.
- return Dir()
- }
- return preferred, nil
-}
diff --git a/internal/config/database_linux_test.go b/internal/config/database_linux_test.go
deleted file mode 100644
index 7ea08c0ae7..0000000000
--- a/internal/config/database_linux_test.go
+++ /dev/null
@@ -1,31 +0,0 @@
-//go:build linux
-// +build linux
-
-package config
-
-import (
- "testing"
-
- "github.com/spf13/afero"
- "github.com/stretchr/testify/assert"
- "github.com/stretchr/testify/require"
-)
-
-func TestDefaultDatabaseRoot(t *testing.T) {
- cfgDir, err := Dir()
- require.NoError(t, err)
-
- root, err := defaultDatabaseRoot()
- require.NoError(t, err)
- assert.Equal(t, cfgDir, root)
-}
-
-func TestFindDatabaseRoot(t *testing.T) {
- mockFS := afero.NewMemMapFs()
- err := mockFS.MkdirAll("/var/opt/flipt", 0000)
- require.NoError(t, err)
-
- root, err := findDatabaseRoot(mockFS)
- require.NoError(t, err)
- assert.Equal(t, "/var/opt/flipt", root)
-}
diff --git a/internal/config/database_test.go b/internal/config/database_test.go
deleted file mode 100644
index db12a07513..0000000000
--- a/internal/config/database_test.go
+++ /dev/null
@@ -1,23 +0,0 @@
-//go:build !linux
-// +build !linux
-
-package config
-
-import (
- "os"
- "path/filepath"
- "testing"
-
- "github.com/stretchr/testify/assert"
- "github.com/stretchr/testify/require"
-)
-
-func TestDefaultDatabaseRoot(t *testing.T) {
- root, err := defaultDatabaseRoot()
- require.NoError(t, err)
-
- configDir, err := os.UserConfigDir()
- require.NoError(t, err)
-
- assert.Equal(t, root, filepath.Join(configDir, "flipt"))
-}
diff --git a/internal/config/deprecations.go b/internal/config/deprecations.go
index 271c5cb629..c956c7099c 100644
--- a/internal/config/deprecations.go
+++ b/internal/config/deprecations.go
@@ -8,11 +8,8 @@ import (
type deprecated string
var (
- deprecateAuthenticationExcludeMetdata deprecated = "authentication.exclude.metadata"
// fields that are deprecated along with their messages
- deprecatedFields = map[deprecated]string{
- deprecateAuthenticationExcludeMetdata: "This feature never worked as intended. Metadata can no longer be excluded from authentication (when required).",
- }
+ deprecatedFields = map[deprecated]string{}
)
const (
diff --git a/internal/config/server.go b/internal/config/server.go
index c2a1886d93..2d81593c9b 100644
--- a/internal/config/server.go
+++ b/internal/config/server.go
@@ -1,7 +1,6 @@
package config
import (
- "encoding/json"
"os"
"time"
@@ -69,36 +68,15 @@ func (c *ServerConfig) validate() error {
}
// Scheme is either HTTP or HTTPS.
-// TODO: can we use a string instead?
-type Scheme uint
+type Scheme string
func (s Scheme) String() string {
- return schemeToString[s]
-}
-
-func (s Scheme) MarshalJSON() ([]byte, error) {
- return json.Marshal(s.String())
-}
-
-func (s Scheme) MarshalYAML() (interface{}, error) {
- return s.String(), nil
+ return string(s)
}
const (
- HTTP Scheme = iota
- HTTPS
-)
-
-var (
- schemeToString = map[Scheme]string{
- HTTP: "http",
- HTTPS: "https",
- }
-
- stringToScheme = map[string]Scheme{
- "http": HTTP,
- "https": HTTPS,
- }
+ HTTP Scheme = "http"
+ HTTPS Scheme = "https"
)
type CloudServerConfig struct {
diff --git a/internal/config/storage.go b/internal/config/storage.go
index f5c5685018..e6030778ac 100644
--- a/internal/config/storage.go
+++ b/internal/config/storage.go
@@ -3,12 +3,9 @@ package config
import (
"errors"
"fmt"
- "os"
- "path/filepath"
"time"
"github.com/spf13/viper"
- "go.flipt.io/flipt/internal/oci"
)
var (
@@ -19,11 +16,9 @@ var (
type StorageType string
const (
- DatabaseStorageType = StorageType("database")
- LocalStorageType = StorageType("local")
- GitStorageType = StorageType("git")
- ObjectStorageType = StorageType("object")
- OCIStorageType = StorageType("oci")
+ LocalStorageType = StorageType("local")
+ GitStorageType = StorageType("git")
+ ObjectStorageType = StorageType("object")
)
type ObjectSubStorageType string
@@ -37,12 +32,10 @@ const (
// StorageConfig contains fields which will configure the type of backend in which Flipt will serve
// flag state.
type StorageConfig struct {
- Type StorageType `json:"type,omitempty" mapstructure:"type" yaml:"type,omitempty"`
- Local *StorageLocalConfig `json:"local,omitempty" mapstructure:"local,omitempty" yaml:"local,omitempty"`
- Git *StorageGitConfig `json:"git,omitempty" mapstructure:"git,omitempty" yaml:"git,omitempty"`
- Object *StorageObjectConfig `json:"object,omitempty" mapstructure:"object,omitempty" yaml:"object,omitempty"`
- OCI *StorageOCIConfig `json:"oci,omitempty" mapstructure:"oci,omitempty" yaml:"oci,omitempty"`
- ReadOnly *bool `json:"readOnly,omitempty" mapstructure:"read_only,omitempty" yaml:"read_only,omitempty"`
+ Type StorageType `json:"type,omitempty" mapstructure:"type" yaml:"type,omitempty"`
+ Local *StorageLocalConfig `json:"local,omitempty" mapstructure:"local,omitempty" yaml:"local,omitempty"`
+ Git *StorageGitConfig `json:"git,omitempty" mapstructure:"git,omitempty" yaml:"git,omitempty"`
+ Object *StorageObjectConfig `json:"object,omitempty" mapstructure:"object,omitempty" yaml:"object,omitempty"`
}
func (c *StorageConfig) Info() map[string]string {
@@ -83,22 +76,6 @@ func (c *StorageConfig) setDefaults(v *viper.Viper) error {
v.SetDefault("storage.object.googlecloud.poll_interval", "1m")
}
- case string(OCIStorageType):
- v.SetDefault("storage.oci.poll_interval", "30s")
- v.SetDefault("storage.oci.manifest_version", "1.1")
-
- dir, err := DefaultBundleDir()
- if err != nil {
- return err
- }
-
- v.SetDefault("storage.oci.bundles_directory", dir)
-
- if v.GetString("storage.oci.authentication.username") != "" ||
- v.GetString("storage.oci.authentication.password") != "" {
- v.SetDefault("storage.oci.authentication.type", oci.AuthenticationTypeStatic)
- }
-
default:
v.SetDefault("storage.type", "database")
}
@@ -135,27 +112,6 @@ func (c *StorageConfig) validate() error {
if err := c.Object.validate(); err != nil {
return err
}
- case OCIStorageType:
- if c.OCI.Repository == "" {
- return errors.New("oci storage repository must be specified")
- }
-
- if c.OCI.ManifestVersion != OCIManifestVersion10 && c.OCI.ManifestVersion != OCIManifestVersion11 {
- return errors.New("wrong manifest version, it should be 1.0 or 1.1")
- }
-
- if _, err := oci.ParseReference(c.OCI.Repository); err != nil {
- return fmt.Errorf("validating OCI configuration: %w", err)
- }
-
- if c.OCI.Authentication != nil && !c.OCI.Authentication.Type.IsValid() {
- return errors.New("oci authentication type is not supported")
- }
- }
-
- // setting read only mode is only supported with database storage
- if c.ReadOnly != nil && !*c.ReadOnly && c.Type != DatabaseStorageType {
- return errors.New("setting read only mode is only supported with database storage")
}
return nil
@@ -346,47 +302,3 @@ func (a SSHAuth) validate() (err error) {
return nil
}
-
-type OCIManifestVersion string
-
-const (
- OCIManifestVersion10 OCIManifestVersion = "1.0"
- OCIManifestVersion11 OCIManifestVersion = "1.1"
-)
-
-// StorageOCIConfig provides configuration support for StorageOCIConfig target registries as a backend store for Flipt.
-type StorageOCIConfig struct {
- // Repository is the target repository and reference to track.
- // It should be in the form [/][:].
- // When the registry is omitted, the bundle is referenced via the local bundle store.
- // Tag defaults to 'latest' when not supplied.
- Repository string `json:"repository,omitempty" mapstructure:"repository" yaml:"repository,omitempty"`
- // BundlesDirectory is the root directory in which Flipt will store and access local feature bundles.
- BundlesDirectory string `json:"bundlesDirectory,omitempty" mapstructure:"bundles_directory" yaml:"bundles_directory,omitempty"`
- // Authentication configures authentication credentials for accessing the target registry
- Authentication *OCIAuthentication `json:"-" mapstructure:"authentication" yaml:"-"`
- PollInterval time.Duration `json:"pollInterval,omitempty" mapstructure:"poll_interval" yaml:"poll_interval,omitempty"`
- // ManifestVersion defines which OCI Manifest version to use.
- ManifestVersion OCIManifestVersion `json:"manifestVersion,omitempty" mapstructure:"manifest_version" yaml:"manifest_version,omitempty"`
-}
-
-// OCIAuthentication configures the credentials for authenticating against a target OCI regitstry
-type OCIAuthentication struct {
- Type oci.AuthenticationType `json:"-" mapstructure:"type" yaml:"-"`
- Username string `json:"-" mapstructure:"username" yaml:"-"`
- Password string `json:"-" mapstructure:"password" yaml:"-"`
-}
-
-func DefaultBundleDir() (string, error) {
- dir, err := Dir()
- if err != nil {
- return "", err
- }
-
- bundlesDir := filepath.Join(dir, "bundles")
- if err := os.MkdirAll(bundlesDir, 0755); err != nil {
- return "", fmt.Errorf("creating image directory: %w", err)
- }
-
- return bundlesDir, nil
-}
diff --git a/internal/config/storage_test.go b/internal/config/storage_test.go
index 51c5713b3a..7fa21ade7b 100644
--- a/internal/config/storage_test.go
+++ b/internal/config/storage_test.go
@@ -11,7 +11,6 @@ func TestStorageConfigInfo(t *testing.T) {
config StorageConfig
expected map[string]string
}{
- {StorageConfig{Type: DatabaseStorageType}, nil},
{StorageConfig{Type: GitStorageType, Git: &StorageGitConfig{Repository: "repo1", Ref: "v1.0.0"}}, map[string]string{
"ref": "v1.0.0", "repository": "repo1",
}},
diff --git a/internal/config/testdata/deprecated/authentication_excluding_metadata.yml b/internal/config/testdata/deprecated/authentication_excluding_metadata.yml
deleted file mode 100644
index aa7a344084..0000000000
--- a/internal/config/testdata/deprecated/authentication_excluding_metadata.yml
+++ /dev/null
@@ -1,4 +0,0 @@
-authentication:
- required: true
- exclude:
- metadata: true
diff --git a/internal/config/testdata/deprecated/cache_memory_enabled.yml b/internal/config/testdata/deprecated/cache_memory_enabled.yml
deleted file mode 100644
index 4c3d9d488e..0000000000
--- a/internal/config/testdata/deprecated/cache_memory_enabled.yml
+++ /dev/null
@@ -1,4 +0,0 @@
-cache:
- memory:
- enabled: true
- expiration: -1s
diff --git a/internal/config/testdata/deprecated/cache_memory_items.yml b/internal/config/testdata/deprecated/cache_memory_items.yml
deleted file mode 100644
index 964bd94bd8..0000000000
--- a/internal/config/testdata/deprecated/cache_memory_items.yml
+++ /dev/null
@@ -1,4 +0,0 @@
-cache:
- memory:
- enabled: false
- items: 500
diff --git a/internal/config/testdata/deprecated/database_migrations_path.yml b/internal/config/testdata/deprecated/database_migrations_path.yml
deleted file mode 100644
index f4a4d0edcb..0000000000
--- a/internal/config/testdata/deprecated/database_migrations_path.yml
+++ /dev/null
@@ -1,2 +0,0 @@
-db:
- migrations_path: "../config/migrations"
diff --git a/internal/config/testdata/deprecated/database_migrations_path_legacy.yml b/internal/config/testdata/deprecated/database_migrations_path_legacy.yml
deleted file mode 100644
index 8b22491511..0000000000
--- a/internal/config/testdata/deprecated/database_migrations_path_legacy.yml
+++ /dev/null
@@ -1,3 +0,0 @@
-db:
- migrations:
- path: "../config/migrations"
diff --git a/internal/config/testdata/deprecated/experimental_filesystem_storage.yml b/internal/config/testdata/deprecated/experimental_filesystem_storage.yml
deleted file mode 100644
index 347db7aa06..0000000000
--- a/internal/config/testdata/deprecated/experimental_filesystem_storage.yml
+++ /dev/null
@@ -1,3 +0,0 @@
-experimental:
- filesystem_storage:
- enabled: true
diff --git a/internal/config/testdata/deprecated/tracing_jaeger.yml b/internal/config/testdata/deprecated/tracing_jaeger.yml
deleted file mode 100644
index bffd31168d..0000000000
--- a/internal/config/testdata/deprecated/tracing_jaeger.yml
+++ /dev/null
@@ -1,3 +0,0 @@
-tracing:
- enabled: true
- exporter: "jaeger"
diff --git a/internal/config/testdata/deprecated/ui_disabled.yml b/internal/config/testdata/deprecated/ui_disabled.yml
deleted file mode 100644
index a94f518451..0000000000
--- a/internal/config/testdata/deprecated/ui_disabled.yml
+++ /dev/null
@@ -1,2 +0,0 @@
-ui:
- enabled: false
diff --git a/internal/config/tracing.go b/internal/config/tracing.go
index d5361a1875..783f647cd6 100644
--- a/internal/config/tracing.go
+++ b/internal/config/tracing.go
@@ -1,7 +1,6 @@
package config
import (
- "encoding/json"
"errors"
"fmt"
@@ -63,16 +62,6 @@ func (c *TracingConfig) validate() error {
return nil
}
-func (c *TracingConfig) deprecations(v *viper.Viper) []deprecated {
- var deprecations []deprecated
-
- if v.GetString("tracing.exporter") == TracingJaeger.String() && v.GetBool("tracing.enabled") {
- deprecations = append(deprecations, "tracing.exporter.jaeger")
- }
-
- return deprecations
-}
-
// IsZero returns true if the tracing config is not enabled.
// This is used for marshalling to YAML for `config init`.
func (c TracingConfig) IsZero() bool {
@@ -80,44 +69,17 @@ func (c TracingConfig) IsZero() bool {
}
// TracingExporter represents the supported tracing exporters.
-// TODO: can we use a string here instead?
-type TracingExporter uint8
-
-func (e TracingExporter) String() string {
- return tracingExporterToString[e]
-}
-
-func (e TracingExporter) MarshalJSON() ([]byte, error) {
- return json.Marshal(e.String())
-}
-
-func (e TracingExporter) MarshalYAML() (interface{}, error) {
- return e.String(), nil
-}
+type TracingExporter string
const (
- _ TracingExporter = iota
- // TracingJaeger ...
- TracingJaeger
- // TracingZipkin ...
- TracingZipkin
- // TracingOTLP ...
- TracingOTLP
+ TracingJaeger TracingExporter = "jaeger"
+ TracingZipkin TracingExporter = "zipkin"
+ TracingOTLP TracingExporter = "otlp"
)
-var (
- tracingExporterToString = map[TracingExporter]string{
- TracingJaeger: "jaeger",
- TracingZipkin: "zipkin",
- TracingOTLP: "otlp",
- }
-
- stringToTracingExporter = map[string]TracingExporter{
- "jaeger": TracingJaeger,
- "zipkin": TracingZipkin,
- "otlp": TracingOTLP,
- }
-)
+func (e TracingExporter) String() string {
+ return string(e)
+}
type TracingPropagator string
diff --git a/internal/ext/exporter.go b/internal/ext/exporter.go
deleted file mode 100644
index 295e255c4b..0000000000
--- a/internal/ext/exporter.go
+++ /dev/null
@@ -1,353 +0,0 @@
-package ext
-
-import (
- "context"
- "encoding/json"
- "fmt"
- "io"
- "slices"
- "strings"
-
- "github.com/blang/semver/v4"
- "go.flipt.io/flipt/rpc/flipt"
-)
-
-const defaultBatchSize = 25
-
-var (
- v1_0 = semver.Version{Major: 1}
- v1_1 = semver.Version{Major: 1, Minor: 1}
- v1_2 = semver.Version{Major: 1, Minor: 2}
- v1_3 = semver.Version{Major: 1, Minor: 3}
- v1_4 = semver.Version{Major: 1, Minor: 4}
- latestVersion = v1_4
-
- supportedVersions = semver.Versions{
- v1_0,
- v1_1,
- v1_2,
- v1_3,
- latestVersion,
- }
-)
-
-type Lister interface {
- GetNamespace(context.Context, *flipt.GetNamespaceRequest) (*flipt.Namespace, error)
- ListNamespaces(context.Context, *flipt.ListNamespaceRequest) (*flipt.NamespaceList, error)
- ListFlags(context.Context, *flipt.ListFlagRequest) (*flipt.FlagList, error)
- ListSegments(context.Context, *flipt.ListSegmentRequest) (*flipt.SegmentList, error)
- ListRules(context.Context, *flipt.ListRuleRequest) (*flipt.RuleList, error)
- ListRollouts(context.Context, *flipt.ListRolloutRequest) (*flipt.RolloutList, error)
-}
-
-type Exporter struct {
- store Lister
- batchSize int32
- namespaceKeys []string
- allNamespaces bool
- sortByKey bool
-}
-
-func NewExporter(store Lister, namespaces string, allNamespaces, sortByKey bool) *Exporter {
- ns := strings.Split(namespaces, ",")
-
- return &Exporter{
- store: store,
- batchSize: defaultBatchSize,
- namespaceKeys: ns,
- allNamespaces: allNamespaces,
- sortByKey: sortByKey,
- }
-}
-
-// We currently only do minor bumps and print out just major.minor
-func versionString(v semver.Version) string {
- return fmt.Sprintf("%d.%d", v.Major, v.Minor)
-}
-
-func (e *Exporter) Export(ctx context.Context, encoding Encoding, w io.Writer) error {
- var (
- enc = encoding.NewEncoder(w)
- batchSize = e.batchSize
- )
-
- defer enc.Close()
-
- namespaces := make([]*Namespace, 0)
-
- // If allNamespaces is "true", then retrieve all the namespaces, and store them in a slice.
- if e.allNamespaces {
- var (
- remaining = true
- nextPage string
- )
-
- for remaining {
- resp, err := e.store.ListNamespaces(ctx, &flipt.ListNamespaceRequest{
- PageToken: nextPage,
- Limit: batchSize,
- })
- if err != nil {
- return fmt.Errorf("getting namespaces: %w", err)
- }
-
- nextPage := resp.NextPageToken
- remaining = nextPage != ""
-
- for _, ns := range resp.Namespaces {
- namespaces = append(namespaces, &Namespace{
- Key: ns.Key,
- Name: ns.Name,
- Description: ns.Description,
- })
- }
- }
-
- // sort namespaces by key if sorting is enabled
- if e.sortByKey {
- slices.SortStableFunc(namespaces, func(i, j *Namespace) int {
- return strings.Compare(i.Key, j.Key)
- })
- }
- } else {
- // If allNamespaces is "false", then retrieve the namespaces specified in the namespaceKeys slice.
- for _, key := range e.namespaceKeys {
- resp, err := e.store.GetNamespace(ctx, &flipt.GetNamespaceRequest{
- Key: key,
- })
- if err != nil {
- return fmt.Errorf("getting namespaces: %w", err)
- }
-
- namespaces = append(namespaces, &Namespace{
- Key: resp.Key,
- Name: resp.Name,
- Description: resp.Description,
- })
- }
- }
-
- for i := 0; i < len(namespaces); i++ {
- doc := new(Document)
- // Only provide the version to the first document in the stream.
- if i == 0 {
- doc.Version = versionString(latestVersion)
- }
- ns := namespaces[i]
- doc.Namespace = &NamespaceEmbed{
- IsNamespace: ns,
- }
-
- var (
- remaining = true
- nextPage string
- )
-
- // export flags/variants in batches
- for batch := int32(0); remaining; batch++ {
- resp, err := e.store.ListFlags(
- ctx,
- &flipt.ListFlagRequest{
- NamespaceKey: ns.Key,
- PageToken: nextPage,
- Limit: batchSize,
- },
- )
- if err != nil {
- return fmt.Errorf("getting flags: %w", err)
- }
-
- flags := resp.Flags
- nextPage = resp.NextPageToken
- remaining = nextPage != ""
-
- for _, f := range flags {
- flag := &Flag{
- Key: f.Key,
- Name: f.Name,
- Type: f.Type.String(),
- Description: f.Description,
- Enabled: f.Enabled,
- Metadata: f.Metadata.AsMap(),
- }
-
- // map variant id => variant key
- variantKeys := make(map[string]string)
-
- // sort variants by key if sorting is enabled
- if e.sortByKey {
- slices.SortStableFunc(f.Variants, func(i, j *flipt.Variant) int {
- return strings.Compare(i.Key, j.Key)
- })
- }
-
- for _, v := range f.Variants {
- var attachment interface{}
-
- if v.Attachment != "" {
- if err := json.Unmarshal([]byte(v.Attachment), &attachment); err != nil {
- return fmt.Errorf("unmarshaling variant attachment: %w", err)
- }
- }
-
- defaultVariant := false
- if f.DefaultVariant != nil {
- defaultVariant = f.DefaultVariant.Id == v.Id
- }
-
- flag.Variants = append(flag.Variants, &Variant{
- Default: defaultVariant,
- Key: v.Key,
- Name: v.Name,
- Description: v.Description,
- Attachment: attachment,
- })
-
- variantKeys[v.Id] = v.Key
- }
-
- // export rules for flag
- resp, err := e.store.ListRules(
- ctx,
- &flipt.ListRuleRequest{
- NamespaceKey: ns.Key,
- FlagKey: flag.Key,
- },
- )
- if err != nil {
- return fmt.Errorf("getting rules for flag %q: %w", flag.Key, err)
- }
-
- rules := resp.Rules
- for _, r := range rules {
- rule := &Rule{}
-
- switch {
- case r.SegmentKey != "":
- rule.Segment = &SegmentEmbed{
- IsSegment: SegmentKey(r.SegmentKey),
- }
- case len(r.SegmentKeys) > 0:
- rule.Segment = &SegmentEmbed{
- IsSegment: &Segments{
- Keys: r.SegmentKeys,
- SegmentOperator: r.SegmentOperator.String(),
- },
- }
- default:
- return fmt.Errorf("wrong format for rule segments %v for flagKey %v", r.Id, flag.Key)
- }
-
- for _, d := range r.Distributions {
- rule.Distributions = append(rule.Distributions, &Distribution{
- VariantKey: variantKeys[d.VariantId],
- Rollout: d.Rollout,
- })
- }
-
- flag.Rules = append(flag.Rules, rule)
- }
-
- rollouts, err := e.store.ListRollouts(ctx, &flipt.ListRolloutRequest{
- NamespaceKey: ns.Key,
- FlagKey: flag.Key,
- })
- if err != nil {
- return fmt.Errorf("getting rollout rules for flag %q: %w", flag.Key, err)
- }
-
- for _, r := range rollouts.Rules {
- rollout := Rollout{
- Description: r.Description,
- }
-
- switch rule := r.Rule.(type) {
- case *flipt.Rollout_Segment:
- rollout.Segment = &SegmentRule{
- Value: rule.Segment.Value,
- }
-
- if rule.Segment.SegmentKey != "" {
- rollout.Segment.Key = rule.Segment.SegmentKey
- } else if len(rule.Segment.SegmentKeys) > 0 {
- rollout.Segment.Keys = rule.Segment.SegmentKeys
- }
-
- if rule.Segment.SegmentOperator == flipt.SegmentOperator_AND_SEGMENT_OPERATOR {
- rollout.Segment.Operator = rule.Segment.SegmentOperator.String()
- }
- case *flipt.Rollout_Threshold:
- rollout.Threshold = &ThresholdRule{
- Percentage: rule.Threshold.Percentage,
- Value: rule.Threshold.Value,
- }
- }
-
- flag.Rollouts = append(flag.Rollouts, &rollout)
- }
-
- doc.Flags = append(doc.Flags, flag)
- }
- }
-
- remaining = true
- nextPage = ""
-
- // export segments/constraints in batches
- for remaining {
- resp, err := e.store.ListSegments(
- ctx,
- &flipt.ListSegmentRequest{
- NamespaceKey: ns.Key,
- PageToken: nextPage,
- Limit: batchSize,
- },
- )
- if err != nil {
- return fmt.Errorf("getting segments: %w", err)
- }
-
- segments := resp.Segments
- nextPage = resp.NextPageToken
- remaining = nextPage != ""
-
- for _, s := range segments {
- segment := &Segment{
- Key: s.Key,
- Name: s.Name,
- Description: s.Description,
- MatchType: s.MatchType.String(),
- }
-
- for _, c := range s.Constraints {
- segment.Constraints = append(segment.Constraints, &Constraint{
- Type: c.Type.String(),
- Property: c.Property,
- Operator: c.Operator,
- Value: c.Value,
- Description: c.Description,
- })
- }
-
- doc.Segments = append(doc.Segments, segment)
- }
- }
-
- // sort flags and segments by key if sorting is enabled
- if e.sortByKey {
- slices.SortStableFunc(doc.Flags, func(i, j *Flag) int {
- return strings.Compare(i.Key, j.Key)
- })
-
- slices.SortStableFunc(doc.Segments, func(i, j *Segment) int {
- return strings.Compare(i.Key, j.Key)
- })
- }
-
- if err := enc.Encode(doc); err != nil {
- return fmt.Errorf("marshaling document: %w", err)
- }
- }
-
- return nil
-}
diff --git a/internal/ext/exporter_test.go b/internal/ext/exporter_test.go
deleted file mode 100644
index 254edee3c2..0000000000
--- a/internal/ext/exporter_test.go
+++ /dev/null
@@ -1,1753 +0,0 @@
-package ext
-
-import (
- "bytes"
- "context"
- "errors"
- "fmt"
- "io"
- "os"
- "sort"
- "strings"
- "testing"
-
- "github.com/stretchr/testify/assert"
- "github.com/stretchr/testify/require"
- "go.flipt.io/flipt/rpc/flipt"
- "google.golang.org/protobuf/types/known/structpb"
-)
-
-type mockLister struct {
- namespaces map[string]*flipt.Namespace
-
- nsToFlags map[string][]*flipt.Flag
- nsToSegments map[string][]*flipt.Segment
- nsToRules map[string][]*flipt.Rule
- nsToRollouts map[string][]*flipt.Rollout
-}
-
-func (m mockLister) GetNamespace(_ context.Context, r *flipt.GetNamespaceRequest) (*flipt.Namespace, error) {
- for k, ns := range m.namespaces {
- // split the key by _ to get the namespace key, as its prefixed with an index
- key := strings.Split(k, "_")[1]
- if r.Key == key {
- return ns, nil
- }
- }
-
- return nil, fmt.Errorf("namespace %s not found", r.Key)
-}
-
-func (m mockLister) ListNamespaces(_ context.Context, _ *flipt.ListNamespaceRequest) (*flipt.NamespaceList, error) {
- var (
- keys []string
- namespaces []*flipt.Namespace
- )
-
- // sort the namespaces by key, as they are prefixed with an index
- for k := range m.namespaces {
- keys = append(keys, k)
- }
-
- sort.Slice(keys, func(i, j int) bool {
- return keys[i] < keys[j]
- })
-
- // remove the index prefix from the key
- for _, k := range keys {
- namespaces = append(namespaces, m.namespaces[k])
- }
-
- return &flipt.NamespaceList{
- Namespaces: namespaces,
- }, nil
-}
-
-func (m mockLister) ListFlags(_ context.Context, listRequest *flipt.ListFlagRequest) (*flipt.FlagList, error) {
- flags := m.nsToFlags[listRequest.NamespaceKey]
-
- return &flipt.FlagList{
- Flags: flags,
- }, nil
-}
-
-func (m mockLister) ListRules(_ context.Context, listRequest *flipt.ListRuleRequest) (*flipt.RuleList, error) {
- rules := m.nsToRules[listRequest.NamespaceKey]
-
- if listRequest.FlagKey == "flag1" {
- return &flipt.RuleList{
- Rules: rules,
- }, nil
- }
-
- return &flipt.RuleList{}, nil
-}
-
-func (m mockLister) ListSegments(_ context.Context, listRequest *flipt.ListSegmentRequest) (*flipt.SegmentList, error) {
- segments := m.nsToSegments[listRequest.NamespaceKey]
-
- return &flipt.SegmentList{
- Segments: segments,
- }, nil
-}
-
-func (m mockLister) ListRollouts(_ context.Context, listRequest *flipt.ListRolloutRequest) (*flipt.RolloutList, error) {
- rollouts := m.nsToRollouts[listRequest.NamespaceKey]
-
- if listRequest.FlagKey == "flag2" {
- return &flipt.RolloutList{
- Rules: rollouts[0:2],
- }, nil
- }
-
- if listRequest.FlagKey == "FLag2" {
- return &flipt.RolloutList{
- Rules: rollouts[2:4],
- }, nil
- }
-
- return &flipt.RolloutList{}, nil
-}
-
-func newStruct(t testing.TB, m map[string]any) *structpb.Struct {
- t.Helper()
- value, err := structpb.NewStruct(m)
- require.NoError(t, err)
- return value
-}
-
-func TestExport(t *testing.T) {
- tests := []struct {
- name string
- lister mockLister
- path string
- namespaces string
- allNamespaces bool
- sortByKey bool
- }{
- {
- name: "single default namespace",
- lister: mockLister{
- namespaces: map[string]*flipt.Namespace{
- "0_default": {
- Key: "default",
- Name: "default",
- Description: "default namespace",
- },
- },
- nsToFlags: map[string][]*flipt.Flag{
- "default": {
- {
- Key: "flag1",
- Name: "flag1",
- Type: flipt.FlagType_VARIANT_FLAG_TYPE,
- Description: "description",
- Enabled: true,
- DefaultVariant: &flipt.Variant{
- Id: "2",
- Key: "foo",
- },
- Variants: []*flipt.Variant{
- {
- Id: "1",
- Key: "variant1",
- Name: "variant1",
- Attachment: `{
- "pi": 3.141,
- "happy": true,
- "name": "Niels",
- "nothing": null,
- "answer": {
- "everything": 42
- },
- "list": [1, 0, 2],
- "object": {
- "currency": "USD",
- "value": 42.99
- }
- }`,
- },
- {
- Id: "2",
- Key: "foo",
- },
- },
- Metadata: newStruct(t, map[string]any{"label": "variant", "area": true}),
- },
- {
- Key: "flag2",
- Name: "flag2",
- Type: flipt.FlagType_BOOLEAN_FLAG_TYPE,
- Description: "a boolean flag",
- Enabled: false,
- Metadata: newStruct(t, map[string]any{"label": "bool", "area": 12}),
- },
- },
- },
- nsToSegments: map[string][]*flipt.Segment{
- "default": {
- {
- Key: "segment1",
- Name: "segment1",
- Description: "description",
- MatchType: flipt.MatchType_ANY_MATCH_TYPE,
- Constraints: []*flipt.Constraint{
- {
- Id: "1",
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "foo",
- Operator: "eq",
- Value: "baz",
- Description: "desc",
- },
- {
- Id: "2",
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "fizz",
- Operator: "neq",
- Value: "buzz",
- Description: "desc",
- },
- },
- },
- {
- Key: "segment2",
- Name: "segment2",
- Description: "description",
- MatchType: flipt.MatchType_ANY_MATCH_TYPE,
- },
- },
- },
- nsToRules: map[string][]*flipt.Rule{
- "default": {
- {
- Id: "1",
- SegmentKey: "segment1",
- Rank: 1,
- Distributions: []*flipt.Distribution{
- {
- Id: "1",
- VariantId: "1",
- RuleId: "1",
- Rollout: 100,
- },
- },
- },
- {
- Id: "2",
- SegmentKeys: []string{"segment1", "segment2"},
- SegmentOperator: flipt.SegmentOperator_AND_SEGMENT_OPERATOR,
- Rank: 2,
- },
- },
- },
-
- nsToRollouts: map[string][]*flipt.Rollout{
- "default": {
- {
- Id: "1",
- FlagKey: "flag2",
- Type: flipt.RolloutType_SEGMENT_ROLLOUT_TYPE,
- Description: "enabled for internal users",
- Rank: int32(1),
- Rule: &flipt.Rollout_Segment{
- Segment: &flipt.RolloutSegment{
- SegmentKey: "internal_users",
- Value: true,
- },
- },
- },
- {
- Id: "2",
- FlagKey: "flag2",
- Type: flipt.RolloutType_THRESHOLD_ROLLOUT_TYPE,
- Description: "enabled for 50%",
- Rank: int32(2),
- Rule: &flipt.Rollout_Threshold{
- Threshold: &flipt.RolloutThreshold{
- Percentage: float32(50.0),
- Value: true,
- },
- },
- },
- },
- },
- },
- path: "testdata/export",
- namespaces: "default",
- allNamespaces: false,
- sortByKey: false,
- },
- {
- name: "multiple namespaces",
- lister: mockLister{
- namespaces: map[string]*flipt.Namespace{
- "0_default": {
- Key: "default",
- Name: "default",
- Description: "default namespace",
- },
- "1_foo": {
- Key: "foo",
- Name: "foo",
- Description: "foo namespace",
- },
- },
- nsToFlags: map[string][]*flipt.Flag{
- "default": {
- {
- Key: "flag1",
- Name: "flag1",
- Type: flipt.FlagType_VARIANT_FLAG_TYPE,
- Description: "description",
- Enabled: true,
- Variants: []*flipt.Variant{
- {
- Id: "1",
- Key: "variant1",
- Name: "variant1",
- Attachment: `{
- "pi": 3.141,
- "happy": true,
- "name": "Niels",
- "nothing": null,
- "answer": {
- "everything": 42
- },
- "list": [1, 0, 2],
- "object": {
- "currency": "USD",
- "value": 42.99
- }
- }`,
- },
- {
- Id: "2",
- Key: "foo",
- },
- },
- },
- {
- Key: "flag2",
- Name: "flag2",
- Type: flipt.FlagType_BOOLEAN_FLAG_TYPE,
- Description: "a boolean flag",
- Enabled: false,
- },
- },
- "foo": {
- {
- Key: "flag1",
- Name: "flag1",
- Type: flipt.FlagType_VARIANT_FLAG_TYPE,
- Description: "description",
- Enabled: true,
- Variants: []*flipt.Variant{
- {
- Id: "1",
- Key: "variant1",
- Name: "variant1",
- Attachment: `{
- "pi": 3.141,
- "happy": true,
- "name": "Niels",
- "nothing": null,
- "answer": {
- "everything": 42
- },
- "list": [1, 0, 2],
- "object": {
- "currency": "USD",
- "value": 42.99
- }
- }`,
- },
- {
- Id: "2",
- Key: "foo",
- },
- },
- },
- {
- Key: "flag2",
- Name: "flag2",
- Type: flipt.FlagType_BOOLEAN_FLAG_TYPE,
- Description: "a boolean flag",
- Enabled: false,
- },
- },
- },
- nsToSegments: map[string][]*flipt.Segment{
- "default": {
- {
- Key: "segment1",
- Name: "segment1",
- Description: "description",
- MatchType: flipt.MatchType_ANY_MATCH_TYPE,
- Constraints: []*flipt.Constraint{
- {
- Id: "1",
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "foo",
- Operator: "eq",
- Value: "baz",
- Description: "desc",
- },
- {
- Id: "2",
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "fizz",
- Operator: "neq",
- Value: "buzz",
- Description: "desc",
- },
- },
- },
- {
- Key: "segment2",
- Name: "segment2",
- Description: "description",
- MatchType: flipt.MatchType_ANY_MATCH_TYPE,
- },
- },
- "foo": {
- {
- Key: "segment1",
- Name: "segment1",
- Description: "description",
- MatchType: flipt.MatchType_ANY_MATCH_TYPE,
- Constraints: []*flipt.Constraint{
- {
- Id: "1",
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "foo",
- Operator: "eq",
- Value: "baz",
- Description: "desc",
- },
- {
- Id: "2",
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "fizz",
- Operator: "neq",
- Value: "buzz",
- Description: "desc",
- },
- },
- },
- {
- Key: "segment2",
- Name: "segment2",
- Description: "description",
- MatchType: flipt.MatchType_ANY_MATCH_TYPE,
- },
- },
- },
- nsToRules: map[string][]*flipt.Rule{
- "default": {
- {
- Id: "1",
- SegmentKey: "segment1",
- Rank: 1,
- Distributions: []*flipt.Distribution{
- {
- Id: "1",
- VariantId: "1",
- RuleId: "1",
- Rollout: 100,
- },
- },
- },
- {
- Id: "2",
- SegmentKeys: []string{"segment1", "segment2"},
- SegmentOperator: flipt.SegmentOperator_AND_SEGMENT_OPERATOR,
- Rank: 2,
- },
- },
- "foo": {
- {
- Id: "1",
- SegmentKey: "segment1",
- Rank: 1,
- Distributions: []*flipt.Distribution{
- {
- Id: "1",
- VariantId: "1",
- RuleId: "1",
- Rollout: 100,
- },
- },
- },
- {
- Id: "2",
- SegmentKeys: []string{"segment1", "segment2"},
- SegmentOperator: flipt.SegmentOperator_AND_SEGMENT_OPERATOR,
- Rank: 2,
- },
- },
- },
-
- nsToRollouts: map[string][]*flipt.Rollout{
- "default": {
- {
- Id: "1",
- FlagKey: "flag2",
- Type: flipt.RolloutType_SEGMENT_ROLLOUT_TYPE,
- Description: "enabled for internal users",
- Rank: int32(1),
- Rule: &flipt.Rollout_Segment{
- Segment: &flipt.RolloutSegment{
- SegmentKey: "internal_users",
- Value: true,
- },
- },
- },
- {
- Id: "2",
- FlagKey: "flag2",
- Type: flipt.RolloutType_THRESHOLD_ROLLOUT_TYPE,
- Description: "enabled for 50%",
- Rank: int32(2),
- Rule: &flipt.Rollout_Threshold{
- Threshold: &flipt.RolloutThreshold{
- Percentage: float32(50.0),
- Value: true,
- },
- },
- },
- },
- "foo": {
- {
- Id: "1",
- FlagKey: "flag2",
- Type: flipt.RolloutType_SEGMENT_ROLLOUT_TYPE,
- Description: "enabled for internal users",
- Rank: int32(1),
- Rule: &flipt.Rollout_Segment{
- Segment: &flipt.RolloutSegment{
- SegmentKey: "internal_users",
- Value: true,
- },
- },
- },
- {
- Id: "2",
- FlagKey: "flag2",
- Type: flipt.RolloutType_THRESHOLD_ROLLOUT_TYPE,
- Description: "enabled for 50%",
- Rank: int32(2),
- Rule: &flipt.Rollout_Threshold{
- Threshold: &flipt.RolloutThreshold{
- Percentage: float32(50.0),
- Value: true,
- },
- },
- },
- },
- },
- },
- path: "testdata/export_default_and_foo",
- namespaces: "default,foo",
- allNamespaces: false,
- sortByKey: false,
- },
- {
- name: "all namespaces",
- lister: mockLister{
- namespaces: map[string]*flipt.Namespace{
- "0_default": {
- Key: "default",
- Name: "default",
- Description: "default namespace",
- },
-
- "1_foo": {
- Key: "foo",
- Name: "foo",
- Description: "foo namespace",
- },
-
- "2_bar": {
- Key: "bar",
- Name: "bar",
- Description: "bar namespace",
- },
- },
- nsToFlags: map[string][]*flipt.Flag{
- "foo": {
- {
- Key: "flag1",
- Name: "flag1",
- Type: flipt.FlagType_VARIANT_FLAG_TYPE,
- Description: "description",
- Enabled: true,
- Variants: []*flipt.Variant{
- {
- Id: "1",
- Key: "variant1",
- Name: "variant1",
- Attachment: `{
- "pi": 3.141,
- "happy": true,
- "name": "Niels",
- "nothing": null,
- "answer": {
- "everything": 42
- },
- "list": [1, 0, 2],
- "object": {
- "currency": "USD",
- "value": 42.99
- }
- }`,
- },
- {
- Id: "2",
- Key: "foo",
- },
- },
- },
- {
- Key: "flag2",
- Name: "flag2",
- Type: flipt.FlagType_BOOLEAN_FLAG_TYPE,
- Description: "a boolean flag",
- Enabled: false,
- },
- },
- "bar": {
- {
- Key: "flag1",
- Name: "flag1",
- Type: flipt.FlagType_VARIANT_FLAG_TYPE,
- Description: "description",
- Enabled: true,
- Variants: []*flipt.Variant{
- {
- Id: "1",
- Key: "variant1",
- Name: "variant1",
- Attachment: `{
- "pi": 3.141,
- "happy": true,
- "name": "Niels",
- "nothing": null,
- "answer": {
- "everything": 42
- },
- "list": [1, 0, 2],
- "object": {
- "currency": "USD",
- "value": 42.99
- }
- }`,
- },
- {
- Id: "2",
- Key: "foo",
- },
- },
- },
- {
- Key: "flag2",
- Name: "flag2",
- Type: flipt.FlagType_BOOLEAN_FLAG_TYPE,
- Description: "a boolean flag",
- Enabled: false,
- },
- },
- },
- nsToSegments: map[string][]*flipt.Segment{
- "foo": {
- {
- Key: "segment1",
- Name: "segment1",
- Description: "description",
- MatchType: flipt.MatchType_ANY_MATCH_TYPE,
- Constraints: []*flipt.Constraint{
- {
- Id: "1",
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "foo",
- Operator: "eq",
- Value: "baz",
- Description: "desc",
- },
- {
- Id: "2",
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "fizz",
- Operator: "neq",
- Value: "buzz",
- Description: "desc",
- },
- },
- },
- {
- Key: "segment2",
- Name: "segment2",
- Description: "description",
- MatchType: flipt.MatchType_ANY_MATCH_TYPE,
- },
- },
- "bar": {
- {
- Key: "segment1",
- Name: "segment1",
- Description: "description",
- MatchType: flipt.MatchType_ANY_MATCH_TYPE,
- Constraints: []*flipt.Constraint{
- {
- Id: "1",
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "foo",
- Operator: "eq",
- Value: "baz",
- Description: "desc",
- },
- {
- Id: "2",
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "fizz",
- Operator: "neq",
- Value: "buzz",
- Description: "desc",
- },
- },
- },
- {
- Key: "segment2",
- Name: "segment2",
- Description: "description",
- MatchType: flipt.MatchType_ANY_MATCH_TYPE,
- },
- },
- },
- nsToRules: map[string][]*flipt.Rule{
- "foo": {
- {
- Id: "1",
- SegmentKey: "segment1",
- Rank: 1,
- Distributions: []*flipt.Distribution{
- {
- Id: "1",
- VariantId: "1",
- RuleId: "1",
- Rollout: 100,
- },
- },
- },
- {
- Id: "2",
- SegmentKeys: []string{"segment1", "segment2"},
- SegmentOperator: flipt.SegmentOperator_AND_SEGMENT_OPERATOR,
- Rank: 2,
- },
- },
- "bar": {
- {
- Id: "1",
- SegmentKey: "segment1",
- Rank: 1,
- Distributions: []*flipt.Distribution{
- {
- Id: "1",
- VariantId: "1",
- RuleId: "1",
- Rollout: 100,
- },
- },
- },
- {
- Id: "2",
- SegmentKeys: []string{"segment1", "segment2"},
- SegmentOperator: flipt.SegmentOperator_AND_SEGMENT_OPERATOR,
- Rank: 2,
- },
- },
- },
-
- nsToRollouts: map[string][]*flipt.Rollout{
- "foo": {
- {
- Id: "1",
- FlagKey: "flag2",
- Type: flipt.RolloutType_SEGMENT_ROLLOUT_TYPE,
- Description: "enabled for internal users",
- Rank: int32(1),
- Rule: &flipt.Rollout_Segment{
- Segment: &flipt.RolloutSegment{
- SegmentKey: "internal_users",
- Value: true,
- },
- },
- },
- {
- Id: "2",
- FlagKey: "flag2",
- Type: flipt.RolloutType_THRESHOLD_ROLLOUT_TYPE,
- Description: "enabled for 50%",
- Rank: int32(2),
- Rule: &flipt.Rollout_Threshold{
- Threshold: &flipt.RolloutThreshold{
- Percentage: float32(50.0),
- Value: true,
- },
- },
- },
- },
- "bar": {
- {
- Id: "1",
- FlagKey: "flag2",
- Type: flipt.RolloutType_SEGMENT_ROLLOUT_TYPE,
- Description: "enabled for internal users",
- Rank: int32(1),
- Rule: &flipt.Rollout_Segment{
- Segment: &flipt.RolloutSegment{
- SegmentKey: "internal_users",
- Value: true,
- },
- },
- },
- {
- Id: "2",
- FlagKey: "flag2",
- Type: flipt.RolloutType_THRESHOLD_ROLLOUT_TYPE,
- Description: "enabled for 50%",
- Rank: int32(2),
- Rule: &flipt.Rollout_Threshold{
- Threshold: &flipt.RolloutThreshold{
- Percentage: float32(50.0),
- Value: true,
- },
- },
- },
- },
- },
- },
- path: "testdata/export_all_namespaces",
- namespaces: "",
- allNamespaces: true,
- sortByKey: false,
- },
- {
- name: "single default namespace with sort by key",
- lister: mockLister{
- namespaces: map[string]*flipt.Namespace{
- "0_default": {
- Key: "default",
- Name: "default",
- Description: "default namespace",
- },
- },
- nsToFlags: map[string][]*flipt.Flag{
- "default": {
- {
- Key: "flag2",
- Name: "flag2",
- Type: flipt.FlagType_BOOLEAN_FLAG_TYPE,
- Description: "a boolean flag",
- Enabled: false,
- Metadata: newStruct(t, map[string]any{"label": "bool", "area": 12}),
- },
- {
- Key: "flag1",
- Name: "flag1",
- Type: flipt.FlagType_VARIANT_FLAG_TYPE,
- Description: "description",
- Enabled: true,
- DefaultVariant: &flipt.Variant{
- Id: "2",
- Key: "foo",
- },
- Variants: []*flipt.Variant{
- {
- Id: "1",
- Key: "variant1",
- Name: "variant1",
- Attachment: `{
- "pi": 3.141,
- "happy": true,
- "name": "Niels",
- "nothing": null,
- "answer": {
- "everything": 42
- },
- "list": [1, 0, 2],
- "object": {
- "currency": "USD",
- "value": 42.99
- }
- }`,
- },
- {
- Id: "2",
- Key: "foo",
- },
- },
- Metadata: newStruct(t, map[string]any{"label": "variant", "area": true}),
- },
- {
- Key: "FLag2",
- Name: "FLag2",
- Type: flipt.FlagType_BOOLEAN_FLAG_TYPE,
- Description: "a boolean flag",
- Enabled: false,
- Metadata: newStruct(t, map[string]any{"label": "bool", "area": 12}),
- },
- },
- },
- nsToSegments: map[string][]*flipt.Segment{
- "default": {
- {
- Key: "segment2",
- Name: "segment2",
- Description: "description",
- MatchType: flipt.MatchType_ANY_MATCH_TYPE,
- },
- {
- Key: "segment1",
- Name: "segment1",
- Description: "description",
- MatchType: flipt.MatchType_ANY_MATCH_TYPE,
- Constraints: []*flipt.Constraint{
- {
- Id: "1",
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "foo",
- Operator: "eq",
- Value: "baz",
- Description: "desc",
- },
- {
- Id: "2",
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "fizz",
- Operator: "neq",
- Value: "buzz",
- Description: "desc",
- },
- },
- },
- },
- },
- nsToRules: map[string][]*flipt.Rule{
- "default": {
- {
- Id: "1",
- SegmentKey: "segment1",
- Rank: 1,
- Distributions: []*flipt.Distribution{
- {
- Id: "1",
- VariantId: "1",
- RuleId: "1",
- Rollout: 100,
- },
- },
- },
- {
- Id: "2",
- SegmentKeys: []string{"segment1", "segment2"},
- SegmentOperator: flipt.SegmentOperator_AND_SEGMENT_OPERATOR,
- Rank: 2,
- },
- },
- },
-
- nsToRollouts: map[string][]*flipt.Rollout{
- "default": {
- {
- Id: "1",
- FlagKey: "flag2",
- Type: flipt.RolloutType_SEGMENT_ROLLOUT_TYPE,
- Description: "enabled for internal users",
- Rank: int32(1),
- Rule: &flipt.Rollout_Segment{
- Segment: &flipt.RolloutSegment{
- SegmentKey: "internal_users",
- Value: true,
- },
- },
- },
- {
- Id: "2",
- FlagKey: "flag2",
- Type: flipt.RolloutType_THRESHOLD_ROLLOUT_TYPE,
- Description: "enabled for 50%",
- Rank: int32(2),
- Rule: &flipt.Rollout_Threshold{
- Threshold: &flipt.RolloutThreshold{
- Percentage: float32(50.0),
- Value: true,
- },
- },
- },
- {
- Id: "3",
- FlagKey: "FLag2",
- Type: flipt.RolloutType_SEGMENT_ROLLOUT_TYPE,
- Description: "enabled for external users",
- Rank: int32(1),
- Rule: &flipt.Rollout_Segment{
- Segment: &flipt.RolloutSegment{
- SegmentKey: "external_users",
- Value: true,
- },
- },
- },
- {
- Id: "4",
- FlagKey: "FLag2",
- Type: flipt.RolloutType_THRESHOLD_ROLLOUT_TYPE,
- Description: "enabled for 60%",
- Rank: int32(2),
- Rule: &flipt.Rollout_Threshold{
- Threshold: &flipt.RolloutThreshold{
- Percentage: float32(60.0),
- Value: true,
- },
- },
- },
- },
- },
- },
- path: "testdata/export_sorted",
- namespaces: "default",
- allNamespaces: false,
- sortByKey: true,
- },
- {
- name: "multiple namespaces with sort by key",
- lister: mockLister{
- namespaces: map[string]*flipt.Namespace{
- "1_foo": {
- Key: "foo",
- Name: "foo",
- Description: "foo namespace",
- },
- "0_default": {
- Key: "default",
- Name: "default",
- Description: "default namespace",
- },
- },
- nsToFlags: map[string][]*flipt.Flag{
- "default": {
- {
- Key: "flag2",
- Name: "flag2",
- Type: flipt.FlagType_BOOLEAN_FLAG_TYPE,
- Description: "a boolean flag",
- Enabled: false,
- },
- {
- Key: "flag1",
- Name: "flag1",
- Type: flipt.FlagType_VARIANT_FLAG_TYPE,
- Description: "description",
- Enabled: true,
- Variants: []*flipt.Variant{
- {
- Id: "1",
- Key: "variant1",
- Name: "variant1",
- Attachment: `{
- "pi": 3.141,
- "happy": true,
- "name": "Niels",
- "nothing": null,
- "answer": {
- "everything": 42
- },
- "list": [1, 0, 2],
- "object": {
- "currency": "USD",
- "value": 42.99
- }
- }`,
- },
- {
- Id: "2",
- Key: "foo",
- },
- },
- },
- {
- Key: "FLag2",
- Name: "FLag2",
- Type: flipt.FlagType_BOOLEAN_FLAG_TYPE,
- Description: "a boolean flag",
- Enabled: false,
- Metadata: newStruct(t, map[string]any{"label": "bool", "area": 12}),
- },
- },
- "foo": {
- {
- Key: "flag2",
- Name: "flag2",
- Type: flipt.FlagType_BOOLEAN_FLAG_TYPE,
- Description: "a boolean flag",
- Enabled: false,
- },
- {
- Key: "flag1",
- Name: "flag1",
- Type: flipt.FlagType_VARIANT_FLAG_TYPE,
- Description: "description",
- Enabled: true,
- Variants: []*flipt.Variant{
- {
- Id: "1",
- Key: "variant1",
- Name: "variant1",
- Attachment: `{
- "pi": 3.141,
- "happy": true,
- "name": "Niels",
- "nothing": null,
- "answer": {
- "everything": 42
- },
- "list": [1, 0, 2],
- "object": {
- "currency": "USD",
- "value": 42.99
- }
- }`,
- },
- {
- Id: "2",
- Key: "foo",
- },
- },
- },
- {
- Key: "FLag2",
- Name: "FLag2",
- Type: flipt.FlagType_BOOLEAN_FLAG_TYPE,
- Description: "a boolean flag",
- Enabled: false,
- Metadata: newStruct(t, map[string]any{"label": "bool", "area": 12}),
- },
- },
- },
- nsToSegments: map[string][]*flipt.Segment{
- "default": {
- {
- Key: "segment2",
- Name: "segment2",
- Description: "description",
- MatchType: flipt.MatchType_ANY_MATCH_TYPE,
- },
- {
- Key: "segment1",
- Name: "segment1",
- Description: "description",
- MatchType: flipt.MatchType_ANY_MATCH_TYPE,
- Constraints: []*flipt.Constraint{
- {
- Id: "1",
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "foo",
- Operator: "eq",
- Value: "baz",
- Description: "desc",
- },
- {
- Id: "2",
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "fizz",
- Operator: "neq",
- Value: "buzz",
- Description: "desc",
- },
- },
- },
- },
- "foo": {
- {
- Key: "segment2",
- Name: "segment2",
- Description: "description",
- MatchType: flipt.MatchType_ANY_MATCH_TYPE,
- },
- {
- Key: "segment1",
- Name: "segment1",
- Description: "description",
- MatchType: flipt.MatchType_ANY_MATCH_TYPE,
- Constraints: []*flipt.Constraint{
- {
- Id: "1",
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "foo",
- Operator: "eq",
- Value: "baz",
- Description: "desc",
- },
- {
- Id: "2",
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "fizz",
- Operator: "neq",
- Value: "buzz",
- Description: "desc",
- },
- },
- },
- },
- },
- nsToRules: map[string][]*flipt.Rule{
- "default": {
- {
- Id: "1",
- SegmentKey: "segment1",
- Rank: 1,
- Distributions: []*flipt.Distribution{
- {
- Id: "1",
- VariantId: "1",
- RuleId: "1",
- Rollout: 100,
- },
- },
- },
- {
- Id: "2",
- SegmentKeys: []string{"segment1", "segment2"},
- SegmentOperator: flipt.SegmentOperator_AND_SEGMENT_OPERATOR,
- Rank: 2,
- },
- },
- "foo": {
- {
- Id: "1",
- SegmentKey: "segment1",
- Rank: 1,
- Distributions: []*flipt.Distribution{
- {
- Id: "1",
- VariantId: "1",
- RuleId: "1",
- Rollout: 100,
- },
- },
- },
- {
- Id: "2",
- SegmentKeys: []string{"segment1", "segment2"},
- SegmentOperator: flipt.SegmentOperator_AND_SEGMENT_OPERATOR,
- Rank: 2,
- },
- },
- },
-
- nsToRollouts: map[string][]*flipt.Rollout{
- "default": {
- {
- Id: "1",
- FlagKey: "flag2",
- Type: flipt.RolloutType_SEGMENT_ROLLOUT_TYPE,
- Description: "enabled for internal users",
- Rank: int32(1),
- Rule: &flipt.Rollout_Segment{
- Segment: &flipt.RolloutSegment{
- SegmentKey: "internal_users",
- Value: true,
- },
- },
- },
- {
- Id: "2",
- FlagKey: "flag2",
- Type: flipt.RolloutType_THRESHOLD_ROLLOUT_TYPE,
- Description: "enabled for 50%",
- Rank: int32(2),
- Rule: &flipt.Rollout_Threshold{
- Threshold: &flipt.RolloutThreshold{
- Percentage: float32(50.0),
- Value: true,
- },
- },
- },
- {
- Id: "3",
- FlagKey: "FLag2",
- Type: flipt.RolloutType_SEGMENT_ROLLOUT_TYPE,
- Description: "enabled for external users",
- Rank: int32(1),
- Rule: &flipt.Rollout_Segment{
- Segment: &flipt.RolloutSegment{
- SegmentKey: "external_users",
- Value: true,
- },
- },
- },
- {
- Id: "4",
- FlagKey: "FLag2",
- Type: flipt.RolloutType_THRESHOLD_ROLLOUT_TYPE,
- Description: "enabled for 60%",
- Rank: int32(2),
- Rule: &flipt.Rollout_Threshold{
- Threshold: &flipt.RolloutThreshold{
- Percentage: float32(60.0),
- Value: true,
- },
- },
- },
- },
- "foo": {
- {
- Id: "1",
- FlagKey: "flag2",
- Type: flipt.RolloutType_SEGMENT_ROLLOUT_TYPE,
- Description: "enabled for internal users",
- Rank: int32(1),
- Rule: &flipt.Rollout_Segment{
- Segment: &flipt.RolloutSegment{
- SegmentKey: "internal_users",
- Value: true,
- },
- },
- },
- {
- Id: "2",
- FlagKey: "flag2",
- Type: flipt.RolloutType_THRESHOLD_ROLLOUT_TYPE,
- Description: "enabled for 50%",
- Rank: int32(2),
- Rule: &flipt.Rollout_Threshold{
- Threshold: &flipt.RolloutThreshold{
- Percentage: float32(50.0),
- Value: true,
- },
- },
- },
- {
- Id: "3",
- FlagKey: "FLag2",
- Type: flipt.RolloutType_SEGMENT_ROLLOUT_TYPE,
- Description: "enabled for external users",
- Rank: int32(1),
- Rule: &flipt.Rollout_Segment{
- Segment: &flipt.RolloutSegment{
- SegmentKey: "external_users",
- Value: true,
- },
- },
- },
- {
- Id: "4",
- FlagKey: "FLag2",
- Type: flipt.RolloutType_THRESHOLD_ROLLOUT_TYPE,
- Description: "enabled for 60%",
- Rank: int32(2),
- Rule: &flipt.Rollout_Threshold{
- Threshold: &flipt.RolloutThreshold{
- Percentage: float32(60.0),
- Value: true,
- },
- },
- },
- },
- },
- },
- path: "testdata/export_default_and_foo_sorted",
- namespaces: "default,foo",
- allNamespaces: false,
- sortByKey: true,
- },
- {
- name: "all namespaces with sort by key",
- lister: mockLister{
- namespaces: map[string]*flipt.Namespace{
- "0_default": {
- Key: "default",
- Name: "default",
- Description: "default namespace",
- },
-
- "1_foo": {
- Key: "foo",
- Name: "foo",
- Description: "foo namespace",
- },
-
- "2_bar": {
- Key: "bar",
- Name: "bar",
- Description: "bar namespace",
- },
- },
- nsToFlags: map[string][]*flipt.Flag{
- "foo": {
- {
- Key: "flag2",
- Name: "flag2",
- Type: flipt.FlagType_BOOLEAN_FLAG_TYPE,
- Description: "a boolean flag",
- Enabled: false,
- },
- {
- Key: "flag1",
- Name: "flag1",
- Type: flipt.FlagType_VARIANT_FLAG_TYPE,
- Description: "description",
- Enabled: true,
- Variants: []*flipt.Variant{
- {
- Id: "1",
- Key: "variant1",
- Name: "variant1",
- Attachment: `{
- "pi": 3.141,
- "happy": true,
- "name": "Niels",
- "nothing": null,
- "answer": {
- "everything": 42
- },
- "list": [1, 0, 2],
- "object": {
- "currency": "USD",
- "value": 42.99
- }
- }`,
- },
- {
- Id: "2",
- Key: "foo",
- },
- },
- },
- {
- Key: "FLag2",
- Name: "FLag2",
- Type: flipt.FlagType_BOOLEAN_FLAG_TYPE,
- Description: "a boolean flag",
- Enabled: false,
- Metadata: newStruct(t, map[string]any{"label": "bool", "area": 12}),
- },
- },
- "bar": {
- {
- Key: "flag2",
- Name: "flag2",
- Type: flipt.FlagType_BOOLEAN_FLAG_TYPE,
- Description: "a boolean flag",
- Enabled: false,
- },
- {
- Key: "flag1",
- Name: "flag1",
- Type: flipt.FlagType_VARIANT_FLAG_TYPE,
- Description: "description",
- Enabled: true,
- Variants: []*flipt.Variant{
- {
- Id: "1",
- Key: "variant1",
- Name: "variant1",
- Attachment: `{
- "pi": 3.141,
- "happy": true,
- "name": "Niels",
- "nothing": null,
- "answer": {
- "everything": 42
- },
- "list": [1, 0, 2],
- "object": {
- "currency": "USD",
- "value": 42.99
- }
- }`,
- },
- {
- Id: "2",
- Key: "foo",
- },
- },
- },
- {
- Key: "FLag2",
- Name: "FLag2",
- Type: flipt.FlagType_BOOLEAN_FLAG_TYPE,
- Description: "a boolean flag",
- Enabled: false,
- Metadata: newStruct(t, map[string]any{"label": "bool", "area": 12}),
- },
- },
- },
- nsToSegments: map[string][]*flipt.Segment{
- "foo": {
- {
- Key: "segment2",
- Name: "segment2",
- Description: "description",
- MatchType: flipt.MatchType_ANY_MATCH_TYPE,
- },
- {
- Key: "segment1",
- Name: "segment1",
- Description: "description",
- MatchType: flipt.MatchType_ANY_MATCH_TYPE,
- Constraints: []*flipt.Constraint{
- {
- Id: "1",
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "foo",
- Operator: "eq",
- Value: "baz",
- Description: "desc",
- },
- {
- Id: "2",
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "fizz",
- Operator: "neq",
- Value: "buzz",
- Description: "desc",
- },
- },
- },
- },
- "bar": {
- {
- Key: "segment2",
- Name: "segment2",
- Description: "description",
- MatchType: flipt.MatchType_ANY_MATCH_TYPE,
- },
- {
- Key: "segment1",
- Name: "segment1",
- Description: "description",
- MatchType: flipt.MatchType_ANY_MATCH_TYPE,
- Constraints: []*flipt.Constraint{
- {
- Id: "1",
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "foo",
- Operator: "eq",
- Value: "baz",
- Description: "desc",
- },
- {
- Id: "2",
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "fizz",
- Operator: "neq",
- Value: "buzz",
- Description: "desc",
- },
- },
- },
- },
- },
- nsToRules: map[string][]*flipt.Rule{
- "foo": {
- {
- Id: "1",
- SegmentKey: "segment1",
- Rank: 1,
- Distributions: []*flipt.Distribution{
- {
- Id: "1",
- VariantId: "1",
- RuleId: "1",
- Rollout: 100,
- },
- },
- },
- {
- Id: "2",
- SegmentKeys: []string{"segment1", "segment2"},
- SegmentOperator: flipt.SegmentOperator_AND_SEGMENT_OPERATOR,
- Rank: 2,
- },
- },
- "bar": {
- {
- Id: "1",
- SegmentKey: "segment1",
- Rank: 1,
- Distributions: []*flipt.Distribution{
- {
- Id: "1",
- VariantId: "1",
- RuleId: "1",
- Rollout: 100,
- },
- },
- },
- {
- Id: "2",
- SegmentKeys: []string{"segment1", "segment2"},
- SegmentOperator: flipt.SegmentOperator_AND_SEGMENT_OPERATOR,
- Rank: 2,
- },
- },
- },
-
- nsToRollouts: map[string][]*flipt.Rollout{
- "foo": {
- {
- Id: "1",
- FlagKey: "flag2",
- Type: flipt.RolloutType_SEGMENT_ROLLOUT_TYPE,
- Description: "enabled for internal users",
- Rank: int32(1),
- Rule: &flipt.Rollout_Segment{
- Segment: &flipt.RolloutSegment{
- SegmentKey: "internal_users",
- Value: true,
- },
- },
- },
- {
- Id: "2",
- FlagKey: "flag2",
- Type: flipt.RolloutType_THRESHOLD_ROLLOUT_TYPE,
- Description: "enabled for 50%",
- Rank: int32(2),
- Rule: &flipt.Rollout_Threshold{
- Threshold: &flipt.RolloutThreshold{
- Percentage: float32(50.0),
- Value: true,
- },
- },
- },
- {
- Id: "3",
- FlagKey: "FLag2",
- Type: flipt.RolloutType_SEGMENT_ROLLOUT_TYPE,
- Description: "enabled for external users",
- Rank: int32(1),
- Rule: &flipt.Rollout_Segment{
- Segment: &flipt.RolloutSegment{
- SegmentKey: "external_users",
- Value: true,
- },
- },
- },
- {
- Id: "4",
- FlagKey: "FLag2",
- Type: flipt.RolloutType_THRESHOLD_ROLLOUT_TYPE,
- Description: "enabled for 60%",
- Rank: int32(2),
- Rule: &flipt.Rollout_Threshold{
- Threshold: &flipt.RolloutThreshold{
- Percentage: float32(60.0),
- Value: true,
- },
- },
- },
- },
- "bar": {
- {
- Id: "1",
- FlagKey: "flag2",
- Type: flipt.RolloutType_SEGMENT_ROLLOUT_TYPE,
- Description: "enabled for internal users",
- Rank: int32(1),
- Rule: &flipt.Rollout_Segment{
- Segment: &flipt.RolloutSegment{
- SegmentKey: "internal_users",
- Value: true,
- },
- },
- },
- {
- Id: "2",
- FlagKey: "flag2",
- Type: flipt.RolloutType_THRESHOLD_ROLLOUT_TYPE,
- Description: "enabled for 50%",
- Rank: int32(2),
- Rule: &flipt.Rollout_Threshold{
- Threshold: &flipt.RolloutThreshold{
- Percentage: float32(50.0),
- Value: true,
- },
- },
- },
- {
- Id: "3",
- FlagKey: "FLag2",
- Type: flipt.RolloutType_SEGMENT_ROLLOUT_TYPE,
- Description: "enabled for external users",
- Rank: int32(1),
- Rule: &flipt.Rollout_Segment{
- Segment: &flipt.RolloutSegment{
- SegmentKey: "external_users",
- Value: true,
- },
- },
- },
- {
- Id: "4",
- FlagKey: "FLag2",
- Type: flipt.RolloutType_THRESHOLD_ROLLOUT_TYPE,
- Description: "enabled for 60%",
- Rank: int32(2),
- Rule: &flipt.Rollout_Threshold{
- Threshold: &flipt.RolloutThreshold{
- Percentage: float32(60.0),
- Value: true,
- },
- },
- },
- },
- },
- },
- path: "testdata/export_all_namespaces_sorted",
- namespaces: "",
- allNamespaces: true,
- sortByKey: true,
- },
- }
-
- for _, tc := range tests {
- tc := tc
- for _, ext := range extensions {
- t.Run(fmt.Sprintf("%s (%s)", tc.name, ext), func(t *testing.T) {
- var (
- exporter = NewExporter(tc.lister, tc.namespaces, tc.allNamespaces, tc.sortByKey)
- b = new(bytes.Buffer)
- )
-
- err := exporter.Export(context.Background(), ext, b)
- require.NoError(t, err)
-
- in, err := os.ReadFile(tc.path + "." + string(ext))
- require.NoError(t, err)
-
- var (
- expected = ext.NewDecoder(bytes.NewReader(in))
- found = ext.NewDecoder(b)
- )
-
- // handle newline delimited JSON
- for {
- var exp, fnd any
- eerr := expected.Decode(&exp)
- ferr := found.Decode(&fnd)
- require.Equal(t, eerr, ferr)
-
- if errors.Is(ferr, io.EOF) {
- break
- }
- require.NoError(t, ferr)
-
- assert.Equal(t, exp, fnd)
- }
- })
- }
- }
-}
diff --git a/internal/ext/importer.go b/internal/ext/importer.go
deleted file mode 100644
index ac50f1d6bb..0000000000
--- a/internal/ext/importer.go
+++ /dev/null
@@ -1,509 +0,0 @@
-package ext
-
-import (
- "bufio"
- "context"
- "encoding/json"
- "errors"
- "fmt"
- "io"
-
- "github.com/blang/semver/v4"
- errs "go.flipt.io/flipt/errors"
- "go.flipt.io/flipt/rpc/flipt"
- "google.golang.org/grpc/codes"
- "google.golang.org/grpc/status"
- "google.golang.org/protobuf/types/known/structpb"
-)
-
-type Creator interface {
- GetNamespace(context.Context, *flipt.GetNamespaceRequest) (*flipt.Namespace, error)
- CreateNamespace(context.Context, *flipt.CreateNamespaceRequest) (*flipt.Namespace, error)
- CreateFlag(context.Context, *flipt.CreateFlagRequest) (*flipt.Flag, error)
- UpdateFlag(context.Context, *flipt.UpdateFlagRequest) (*flipt.Flag, error)
- CreateVariant(context.Context, *flipt.CreateVariantRequest) (*flipt.Variant, error)
- CreateSegment(context.Context, *flipt.CreateSegmentRequest) (*flipt.Segment, error)
- CreateConstraint(context.Context, *flipt.CreateConstraintRequest) (*flipt.Constraint, error)
- CreateRule(context.Context, *flipt.CreateRuleRequest) (*flipt.Rule, error)
- CreateDistribution(context.Context, *flipt.CreateDistributionRequest) (*flipt.Distribution, error)
- CreateRollout(context.Context, *flipt.CreateRolloutRequest) (*flipt.Rollout, error)
- ListFlags(ctx context.Context, v *flipt.ListFlagRequest) (*flipt.FlagList, error)
- ListSegments(ctx context.Context, v *flipt.ListSegmentRequest) (*flipt.SegmentList, error)
-}
-
-type Importer struct {
- creator Creator
-}
-
-type ImportOpt func(*Importer)
-
-func NewImporter(store Creator, opts ...ImportOpt) *Importer {
- i := &Importer{
- creator: store,
- }
-
- for _, opt := range opts {
- opt(i)
- }
-
- return i
-}
-
-func (i *Importer) Import(ctx context.Context, enc Encoding, r io.Reader, skipExisting bool) (err error) {
- if enc == EncodingJSON {
- r, err = i.jsonReader(r)
- if err != nil {
- return err
- }
- }
- var (
- dec = enc.NewDecoder(r)
- version semver.Version
- )
-
- idx := 0
-
- for {
- doc := new(Document)
- if err := dec.Decode(doc); err != nil {
- if errors.Is(err, io.EOF) {
- break
- }
- return fmt.Errorf("unmarshalling document: %w", err)
- }
-
- // Only support parsing vesrion at the top of each import file.
- if idx == 0 {
- version = latestVersion
- if doc.Version != "" {
- version, err = semver.ParseTolerant(doc.Version)
- if err != nil {
- return fmt.Errorf("parsing document version: %w", err)
- }
-
- var found bool
- for _, sv := range supportedVersions {
- if found = sv.EQ(version); found {
- break
- }
- }
-
- if !found {
- return fmt.Errorf("unsupported version: %s", doc.Version)
- }
- }
- }
-
- namespaceKey := flipt.DefaultNamespace
-
- // non-default namespace, create it if it doesn't exist
- if doc.Namespace != nil && doc.Namespace.GetKey() != flipt.DefaultNamespace {
- namespaceKey = doc.Namespace.GetKey()
- _, err := i.creator.GetNamespace(ctx, &flipt.GetNamespaceRequest{
- Key: namespaceKey,
- })
- if err != nil {
- if status.Code(err) != codes.NotFound && !errs.AsMatch[errs.ErrNotFound](err) {
- return err
- }
-
- var namespaceName, namespaceDescription string
-
- switch ns := doc.Namespace.IsNamespace.(type) {
- case NamespaceKey:
- namespaceName = string(ns)
- case *Namespace:
- namespaceName = ns.Name
- namespaceDescription = ns.Description
- }
-
- _, err = i.creator.CreateNamespace(ctx, &flipt.CreateNamespaceRequest{
- Key: namespaceKey,
- Name: namespaceName,
- Description: namespaceDescription,
- })
- if err != nil {
- return err
- }
- }
- }
-
- var (
- // map flagKey => *flag
- createdFlags = make(map[string]*flipt.Flag)
- // map segmentKey => *segment
- createdSegments = make(map[string]*flipt.Segment)
- // map flagKey:variantKey => *variant
- createdVariants = make(map[string]*flipt.Variant)
- // map flagKey => bool
- existingFlags = make(map[string]bool)
- // map segmentKey => bool
- existingSegments = make(map[string]bool)
- )
-
- if skipExisting {
- existingFlags, err = i.existingFlags(ctx, namespaceKey)
- if err != nil {
- return err
- }
-
- existingSegments, err = i.existingSegments(ctx, namespaceKey)
- if err != nil {
- return err
- }
- }
-
- // create flags/variants
- for _, f := range doc.Flags {
- if f == nil {
- continue
- } else if existingFlags[f.Key] {
- continue
- }
-
- req := &flipt.CreateFlagRequest{
- Key: f.Key,
- Name: f.Name,
- Description: f.Description,
- Enabled: f.Enabled,
- NamespaceKey: namespaceKey,
- }
-
- if f.Metadata != nil {
- metadata, err := structpb.NewStruct(f.Metadata)
- if err != nil {
- return err
- }
- req.Metadata = metadata
- }
-
- // support explicitly setting flag type from 1.1
- if f.Type != "" {
- if err := ensureFieldSupported("flag.type", v1_1, version); err != nil {
- return err
- }
-
- req.Type = flipt.FlagType(flipt.FlagType_value[f.Type])
- }
-
- flag, err := i.creator.CreateFlag(ctx, req)
- if err != nil {
- return fmt.Errorf("creating flag: %w", err)
- }
-
- var defaultVariantId string
-
- for _, v := range f.Variants {
- if v == nil {
- continue
- }
-
- var out []byte
-
- if v.Attachment != nil {
- out, err = json.Marshal(v.Attachment)
- if err != nil {
- return fmt.Errorf("marshalling attachment: %w", err)
- }
- }
-
- variant, err := i.creator.CreateVariant(ctx, &flipt.CreateVariantRequest{
- FlagKey: f.Key,
- Key: v.Key,
- Name: v.Name,
- Description: v.Description,
- Attachment: string(out),
- NamespaceKey: namespaceKey,
- })
- if err != nil {
- return fmt.Errorf("creating variant: %w", err)
- }
-
- // last variant with default=true will be the default variant when importing
- if v.Default {
- // support explicitly setting default variant from 1.3
- if err := ensureFieldSupported("variant.default", v1_3, version); err != nil {
- return err
- }
- defaultVariantId = variant.Id
- }
-
- createdVariants[fmt.Sprintf("%s:%s", flag.Key, variant.Key)] = variant
- }
-
- if defaultVariantId != "" {
- _, err := i.creator.UpdateFlag(ctx, &flipt.UpdateFlagRequest{
- Key: flag.Key,
- Name: flag.Name,
- Description: flag.Description,
- Enabled: flag.Enabled,
- NamespaceKey: namespaceKey,
- DefaultVariantId: defaultVariantId,
- })
- if err != nil {
- return fmt.Errorf("updating flag: %w", err)
- }
- }
-
- createdFlags[flag.Key] = flag
- }
-
- // create segments/constraints
- for _, s := range doc.Segments {
- if s == nil {
- continue
- } else if existingSegments[s.Key] {
- continue
- }
-
- segment, err := i.creator.CreateSegment(ctx, &flipt.CreateSegmentRequest{
- Key: s.Key,
- Name: s.Name,
- Description: s.Description,
- MatchType: flipt.MatchType(flipt.MatchType_value[s.MatchType]),
- NamespaceKey: namespaceKey,
- })
- if err != nil {
- return fmt.Errorf("creating segment: %w", err)
- }
-
- for _, c := range s.Constraints {
- if c == nil {
- continue
- }
-
- _, err := i.creator.CreateConstraint(ctx, &flipt.CreateConstraintRequest{
- SegmentKey: s.Key,
- Type: flipt.ComparisonType(flipt.ComparisonType_value[c.Type]),
- Property: c.Property,
- Operator: c.Operator,
- Value: c.Value,
- NamespaceKey: namespaceKey,
- })
- if err != nil {
- return fmt.Errorf("creating constraint: %w", err)
- }
- }
-
- createdSegments[segment.Key] = segment
- }
-
- // create rules/distributions
- for _, f := range doc.Flags {
- if f == nil {
- continue
- } else if existingFlags[f.Key] {
- continue
- }
-
- // loop through rules
- for idx, r := range f.Rules {
- if r == nil {
- continue
- }
-
- // support implicit rank from version >=1.1
- rank := int32(r.Rank)
- if rank == 0 && version.GE(v1_1) {
- rank = int32(idx) + 1
- }
-
- fcr := &flipt.CreateRuleRequest{
- FlagKey: f.Key,
- Rank: rank,
- NamespaceKey: namespaceKey,
- }
-
- switch s := r.Segment.IsSegment.(type) {
- case SegmentKey:
- fcr.SegmentKey = string(s)
- case *Segments:
- fcr.SegmentKeys = s.Keys
- fcr.SegmentOperator = flipt.SegmentOperator(flipt.SegmentOperator_value[s.SegmentOperator])
- }
-
- rule, err := i.creator.CreateRule(ctx, fcr)
- if err != nil {
- return fmt.Errorf("creating rule: %w", err)
- }
-
- for _, d := range r.Distributions {
- if d == nil {
- continue
- }
-
- variant, found := createdVariants[fmt.Sprintf("%s:%s", f.Key, d.VariantKey)]
- if !found {
- return fmt.Errorf("finding variant: %s; flag: %s", d.VariantKey, f.Key)
- }
-
- _, err := i.creator.CreateDistribution(ctx, &flipt.CreateDistributionRequest{
- FlagKey: f.Key,
- RuleId: rule.Id,
- VariantId: variant.Id,
- Rollout: d.Rollout,
- NamespaceKey: namespaceKey,
- })
- if err != nil {
- return fmt.Errorf("creating distribution: %w", err)
- }
- }
- }
-
- // support explicitly setting flag type from 1.1
- if len(f.Rollouts) > 0 {
- if err := ensureFieldSupported("flag.rollouts", v1_1, version); err != nil {
- return err
- }
-
- for idx, r := range f.Rollouts {
- if r.Segment != nil && r.Threshold != nil {
- return fmt.Errorf(`rollout "%s/%s/%d" cannot have both segment and percentage rule`,
- namespaceKey,
- f.Key,
- idx,
- )
- }
-
- req := &flipt.CreateRolloutRequest{
- NamespaceKey: namespaceKey,
- FlagKey: f.Key,
- Description: r.Description,
- Rank: int32(idx + 1),
- }
-
- if r.Segment != nil {
- frs := &flipt.RolloutSegment{
- Value: r.Segment.Value,
- SegmentKey: r.Segment.Key,
- }
-
- if len(r.Segment.Keys) > 0 && r.Segment.Key != "" {
- return fmt.Errorf("rollout %s/%s/%d cannot have both segment.keys and segment.key",
- namespaceKey,
- f.Key,
- idx,
- )
- }
-
- // support explicitly setting only "keys" on rules from 1.2
- if len(r.Segment.Keys) > 0 {
- if err := ensureFieldSupported("flag.rollouts[*].segment.keys", v1_2, version); err != nil {
- return err
- }
-
- frs.SegmentKeys = r.Segment.Keys
- }
-
- frs.SegmentOperator = flipt.SegmentOperator(flipt.SegmentOperator_value[r.Segment.Operator])
-
- req.Rule = &flipt.CreateRolloutRequest_Segment{
- Segment: frs,
- }
- } else if r.Threshold != nil {
- req.Rule = &flipt.CreateRolloutRequest_Threshold{
- Threshold: &flipt.RolloutThreshold{
- Percentage: r.Threshold.Percentage,
- Value: r.Threshold.Value,
- },
- }
- }
-
- if _, err := i.creator.CreateRollout(ctx, req); err != nil {
- return fmt.Errorf("creating rollout: %w", err)
- }
- }
- }
- }
-
- idx += 1
- }
-
- return nil
-}
-
-// jsonReader prepares the reader for reading the import file.
-// It skips the first line if it starts with '#'
-// See more github.com/flipt-io/flipt/issues/3636
-func (*Importer) jsonReader(r io.Reader) (io.Reader, error) {
- br := bufio.NewReader(r)
- b, err := br.Peek(1)
- if err != nil {
- return nil, err
- }
-
- if b[0] == '#' {
- _, _, err := br.ReadLine()
- if err != nil {
- return nil, err
- }
- }
- return br, nil
-}
-
-func ensureFieldSupported(field string, expected, have semver.Version) error {
- if have.LT(expected) {
- return fmt.Errorf("%s is supported in version >=%s, found %s",
- field,
- versionString(expected),
- versionString(have))
- }
-
- return nil
-}
-
-func (i *Importer) existingFlags(ctx context.Context, namespace string) (map[string]bool, error) {
- // map flagKey => bool
- existingFlags := make(map[string]bool)
-
- nextPageToken := ""
- for {
- req := &flipt.ListFlagRequest{
- PageToken: nextPageToken,
- NamespaceKey: namespace,
- }
- flagList, err := i.creator.ListFlags(ctx, req)
- if err != nil {
- return nil, fmt.Errorf("listing flags: %w", err)
- }
-
- for _, f := range flagList.Flags {
- existingFlags[f.Key] = true
- }
-
- nextPageToken = flagList.NextPageToken
- if nextPageToken == "" {
- break
- }
- }
-
- return existingFlags, nil
-}
-
-func (i *Importer) existingSegments(ctx context.Context, namespace string) (map[string]bool, error) {
- // map segmentKey => bool
- existingSegments := make(map[string]bool)
-
- nextPageToken := ""
- for {
- req := &flipt.ListSegmentRequest{
- PageToken: nextPageToken,
- NamespaceKey: namespace,
- }
- segmentList, err := i.creator.ListSegments(ctx, req)
- if err != nil {
- return nil, fmt.Errorf("listing segments: %w", err)
- }
-
- for _, s := range segmentList.Segments {
- existingSegments[s.Key] = true
- }
-
- nextPageToken = segmentList.NextPageToken
- if nextPageToken == "" {
- break
- }
- }
-
- return existingSegments, nil
-}
diff --git a/internal/ext/importer_fuzz_test.go b/internal/ext/importer_fuzz_test.go
deleted file mode 100644
index 5f5f8def95..0000000000
--- a/internal/ext/importer_fuzz_test.go
+++ /dev/null
@@ -1,29 +0,0 @@
-//go:build go1.18
-// +build go1.18
-
-package ext
-
-import (
- "bytes"
- "context"
- "os"
- "testing"
-)
-
-func FuzzImport(f *testing.F) {
- testcases := []string{"testdata/import.yml", "testdata/import_no_attachment.yml", "testdata/export.yml"}
- skipExistingFalse := false
-
- for _, tc := range testcases {
- b, _ := os.ReadFile(tc)
- f.Add(b)
- }
-
- f.Fuzz(func(t *testing.T, in []byte) {
- importer := NewImporter(&mockCreator{})
- if err := importer.Import(context.Background(), EncodingYAML, bytes.NewReader(in), skipExistingFalse); err != nil {
- // we only care about panics
- t.Skip()
- }
- })
-}
diff --git a/internal/ext/importer_test.go b/internal/ext/importer_test.go
deleted file mode 100644
index 3546fc059c..0000000000
--- a/internal/ext/importer_test.go
+++ /dev/null
@@ -1,1287 +0,0 @@
-package ext
-
-import (
- "context"
- "encoding/json"
- "errors"
- "fmt"
- "os"
- "testing"
-
- "github.com/stretchr/testify/assert"
- "github.com/stretchr/testify/require"
- "go.flipt.io/flipt/rpc/flipt"
-)
-
-var (
- extensions = []Encoding{EncodingYML, EncodingJSON}
- skipExistingFalse = false
-)
-
-type mockCreator struct {
- getNSReqs []*flipt.GetNamespaceRequest
- getNSErr error
-
- createNSReqs []*flipt.CreateNamespaceRequest
- createNSErr error
-
- createflagReqs []*flipt.CreateFlagRequest
- createflagErr error
-
- updateFlagReqs []*flipt.UpdateFlagRequest
- updateFlagErr error
-
- variantReqs []*flipt.CreateVariantRequest
- variantErr error
-
- segmentReqs []*flipt.CreateSegmentRequest
- segmentErr error
-
- constraintReqs []*flipt.CreateConstraintRequest
- constraintErr error
-
- ruleReqs []*flipt.CreateRuleRequest
- ruleErr error
-
- distributionReqs []*flipt.CreateDistributionRequest
- distributionErr error
-
- rolloutReqs []*flipt.CreateRolloutRequest
- rolloutErr error
-
- listFlagReqs []*flipt.ListFlagRequest
- listFlagResps []*flipt.FlagList
- listFlagErr error
-
- listSegmentReqs []*flipt.ListSegmentRequest
- listSegmentResps []*flipt.SegmentList
- listSegmentErr error
-}
-
-func (m *mockCreator) GetNamespace(ctx context.Context, r *flipt.GetNamespaceRequest) (*flipt.Namespace, error) {
- m.getNSReqs = append(m.getNSReqs, r)
- return &flipt.Namespace{Key: "default"}, m.getNSErr
-}
-
-func (m *mockCreator) CreateNamespace(ctx context.Context, r *flipt.CreateNamespaceRequest) (*flipt.Namespace, error) {
- m.createNSReqs = append(m.createNSReqs, r)
- return &flipt.Namespace{Key: "default"}, m.createNSErr
-}
-
-func (m *mockCreator) CreateFlag(ctx context.Context, r *flipt.CreateFlagRequest) (*flipt.Flag, error) {
- m.createflagReqs = append(m.createflagReqs, r)
- if m.createflagErr != nil {
- return nil, m.createflagErr
- }
- return &flipt.Flag{
- Key: r.Key,
- NamespaceKey: r.NamespaceKey,
- Name: r.Name,
- Description: r.Description,
- Type: r.Type,
- Enabled: r.Enabled,
- Metadata: r.Metadata,
- }, nil
-}
-
-func (m *mockCreator) UpdateFlag(ctx context.Context, r *flipt.UpdateFlagRequest) (*flipt.Flag, error) {
- m.updateFlagReqs = append(m.updateFlagReqs, r)
- if m.updateFlagErr != nil {
- return nil, m.updateFlagErr
- }
- return &flipt.Flag{
- Key: r.Key,
- NamespaceKey: r.NamespaceKey,
- Name: r.Name,
- Description: r.Description,
- DefaultVariant: &flipt.Variant{
- Id: r.DefaultVariantId,
- },
- Enabled: r.Enabled,
- }, nil
-}
-
-func (m *mockCreator) CreateVariant(ctx context.Context, r *flipt.CreateVariantRequest) (*flipt.Variant, error) {
- m.variantReqs = append(m.variantReqs, r)
- if m.variantErr != nil {
- return nil, m.variantErr
- }
- return &flipt.Variant{
- Id: "static_variant_id",
- NamespaceKey: r.NamespaceKey,
- FlagKey: r.FlagKey,
- Key: r.Key,
- Name: r.Name,
- Description: r.Description,
- Attachment: r.Attachment,
- }, nil
-}
-
-func (m *mockCreator) CreateSegment(ctx context.Context, r *flipt.CreateSegmentRequest) (*flipt.Segment, error) {
- m.segmentReqs = append(m.segmentReqs, r)
- if m.segmentErr != nil {
- return nil, m.segmentErr
- }
- return &flipt.Segment{
- Key: r.Key,
- NamespaceKey: r.NamespaceKey,
- Name: r.Name,
- Description: r.Description,
- MatchType: r.MatchType,
- }, nil
-}
-
-func (m *mockCreator) CreateConstraint(ctx context.Context, r *flipt.CreateConstraintRequest) (*flipt.Constraint, error) {
- m.constraintReqs = append(m.constraintReqs, r)
- if m.constraintErr != nil {
- return nil, m.constraintErr
- }
- return &flipt.Constraint{
- Id: "static_constraint_id",
- NamespaceKey: r.NamespaceKey,
- SegmentKey: r.SegmentKey,
- Type: r.Type,
- Property: r.Property,
- Operator: r.Operator,
- Value: r.Value,
- }, nil
-}
-
-func (m *mockCreator) CreateRule(ctx context.Context, r *flipt.CreateRuleRequest) (*flipt.Rule, error) {
- m.ruleReqs = append(m.ruleReqs, r)
- if m.ruleErr != nil {
- return nil, m.ruleErr
- }
- return &flipt.Rule{
- Id: "static_rule_id",
- NamespaceKey: r.NamespaceKey,
- FlagKey: r.FlagKey,
- SegmentKey: r.SegmentKey,
- Rank: r.Rank,
- }, nil
-}
-
-func (m *mockCreator) CreateDistribution(ctx context.Context, r *flipt.CreateDistributionRequest) (*flipt.Distribution, error) {
- m.distributionReqs = append(m.distributionReqs, r)
- if m.distributionErr != nil {
- return nil, m.distributionErr
- }
- return &flipt.Distribution{
- Id: "static_distribution_id",
- RuleId: r.RuleId,
- VariantId: r.VariantId,
- Rollout: r.Rollout,
- }, nil
-}
-
-func (m *mockCreator) CreateRollout(ctx context.Context, r *flipt.CreateRolloutRequest) (*flipt.Rollout, error) {
- m.rolloutReqs = append(m.rolloutReqs, r)
- if m.rolloutErr != nil {
- return nil, m.rolloutErr
- }
-
- rollout := &flipt.Rollout{
- Id: "static_rollout_id",
- NamespaceKey: r.NamespaceKey,
- FlagKey: r.FlagKey,
- Description: r.Description,
- Rank: r.Rank,
- }
-
- switch rule := r.Rule.(type) {
- case *flipt.CreateRolloutRequest_Threshold:
- rollout.Rule = &flipt.Rollout_Threshold{
- Threshold: rule.Threshold,
- }
- case *flipt.CreateRolloutRequest_Segment:
- rollout.Rule = &flipt.Rollout_Segment{
- Segment: rule.Segment,
- }
- default:
- return nil, errors.New("unexpected rollout rule type")
- }
-
- return rollout, nil
-}
-
-func (m *mockCreator) ListFlags(ctx context.Context, r *flipt.ListFlagRequest) (*flipt.FlagList, error) {
- m.listFlagReqs = append(m.listFlagReqs, r)
- if m.listFlagErr != nil {
- return nil, m.listFlagErr
- }
-
- if len(m.listFlagResps) == 0 {
- return nil, fmt.Errorf("no response for ListFlags request: %+v", r)
- }
-
- var resp *flipt.FlagList
- resp, m.listFlagResps = m.listFlagResps[0], m.listFlagResps[1:]
- return resp, nil
-}
-
-func (m *mockCreator) ListSegments(ctx context.Context, r *flipt.ListSegmentRequest) (*flipt.SegmentList, error) {
- m.listSegmentReqs = append(m.listSegmentReqs, r)
- if m.listSegmentErr != nil {
- return nil, m.listSegmentErr
- }
-
- if len(m.listSegmentResps) == 0 {
- return nil, fmt.Errorf("no response for ListSegments request: %+v", r)
- }
-
- var resp *flipt.SegmentList
- resp, m.listSegmentResps = m.listSegmentResps[0], m.listSegmentResps[1:]
- return resp, nil
-}
-
-const variantAttachment = `{
- "pi": 3.141,
- "happy": true,
- "name": "Niels",
- "answer": {
- "everything": 42
- },
- "list": [1, 0, 2],
- "object": {
- "currency": "USD",
- "value": 42.99
- }
-}`
-
-func TestImport(t *testing.T) {
- tests := []struct {
- name string
- path string
- skipExisting bool
- creator func() *mockCreator
- expected *mockCreator
- }{
- {
- name: "import with attachment and default variant",
- path: "testdata/import",
- expected: &mockCreator{
- createflagReqs: []*flipt.CreateFlagRequest{
- {
- NamespaceKey: "default",
- Key: "flag1",
- Name: "flag1",
- Description: "description",
- Type: flipt.FlagType_VARIANT_FLAG_TYPE,
- Enabled: true,
- },
- {
- NamespaceKey: "default",
- Key: "flag2",
- Name: "flag2",
- Description: "a boolean flag",
- Type: flipt.FlagType_BOOLEAN_FLAG_TYPE,
- Enabled: false,
- },
- },
- variantReqs: []*flipt.CreateVariantRequest{
- {
- NamespaceKey: "default",
- FlagKey: "flag1",
- Key: "variant1",
- Name: "variant1",
- Description: "variant description",
- Attachment: compact(t, variantAttachment),
- },
- },
- updateFlagReqs: []*flipt.UpdateFlagRequest{
- {
- NamespaceKey: "default",
- Key: "flag1",
- Name: "flag1",
- Description: "description",
- Enabled: true,
- DefaultVariantId: "static_variant_id",
- },
- },
- segmentReqs: []*flipt.CreateSegmentRequest{
- {
- NamespaceKey: "default",
- Key: "segment1",
- Name: "segment1",
- Description: "description",
- MatchType: flipt.MatchType_ANY_MATCH_TYPE,
- },
- },
- constraintReqs: []*flipt.CreateConstraintRequest{
- {
- NamespaceKey: "default",
- SegmentKey: "segment1",
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "fizz",
- Operator: "neq",
- Value: "buzz",
- },
- },
- ruleReqs: []*flipt.CreateRuleRequest{
- {
- NamespaceKey: "default",
- FlagKey: "flag1",
- SegmentKey: "segment1",
- Rank: 1,
- },
- },
- distributionReqs: []*flipt.CreateDistributionRequest{
- {
- NamespaceKey: "default",
- RuleId: "static_rule_id",
- VariantId: "static_variant_id",
- FlagKey: "flag1",
- Rollout: 100,
- },
- },
- rolloutReqs: []*flipt.CreateRolloutRequest{
- {
- NamespaceKey: "default",
- FlagKey: "flag2",
- Description: "enabled for internal users",
- Rank: 1,
- Rule: &flipt.CreateRolloutRequest_Segment{
- Segment: &flipt.RolloutSegment{
- SegmentKey: "internal_users",
- Value: true,
- },
- },
- },
- {
- NamespaceKey: "default",
- FlagKey: "flag2",
- Description: "enabled for 50%",
- Rank: 2,
- Rule: &flipt.CreateRolloutRequest_Threshold{
- Threshold: &flipt.RolloutThreshold{
- Percentage: 50.0,
- Value: true,
- },
- },
- },
- },
- },
- },
- {
- name: "import with attachment",
- path: "testdata/import_with_attachment",
- expected: &mockCreator{
- createflagReqs: []*flipt.CreateFlagRequest{
- {
- NamespaceKey: "default",
- Key: "flag1",
- Name: "flag1",
- Description: "description",
- Type: flipt.FlagType_VARIANT_FLAG_TYPE,
- Enabled: true,
- },
- {
- NamespaceKey: "default",
- Key: "flag2",
- Name: "flag2",
- Description: "a boolean flag",
- Type: flipt.FlagType_BOOLEAN_FLAG_TYPE,
- Enabled: false,
- },
- },
- variantReqs: []*flipt.CreateVariantRequest{
- {
- NamespaceKey: "default",
- FlagKey: "flag1",
- Key: "variant1",
- Name: "variant1",
- Description: "variant description",
- Attachment: compact(t, variantAttachment),
- },
- },
- segmentReqs: []*flipt.CreateSegmentRequest{
- {
- NamespaceKey: "default",
- Key: "segment1",
- Name: "segment1",
- Description: "description",
- MatchType: flipt.MatchType_ANY_MATCH_TYPE,
- },
- },
- constraintReqs: []*flipt.CreateConstraintRequest{
- {
- NamespaceKey: "default",
- SegmentKey: "segment1",
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "fizz",
- Operator: "neq",
- Value: "buzz",
- },
- },
- ruleReqs: []*flipt.CreateRuleRequest{
- {
- NamespaceKey: "default",
- FlagKey: "flag1",
- SegmentKey: "segment1",
- Rank: 1,
- },
- },
- distributionReqs: []*flipt.CreateDistributionRequest{
- {
- NamespaceKey: "default",
- RuleId: "static_rule_id",
- VariantId: "static_variant_id",
- FlagKey: "flag1",
- Rollout: 100,
- },
- },
- rolloutReqs: []*flipt.CreateRolloutRequest{
- {
- NamespaceKey: "default",
- FlagKey: "flag2",
- Description: "enabled for internal users",
- Rank: 1,
- Rule: &flipt.CreateRolloutRequest_Segment{
- Segment: &flipt.RolloutSegment{
- SegmentKey: "internal_users",
- Value: true,
- },
- },
- },
- {
- NamespaceKey: "default",
- FlagKey: "flag2",
- Description: "enabled for 50%",
- Rank: 2,
- Rule: &flipt.CreateRolloutRequest_Threshold{
- Threshold: &flipt.RolloutThreshold{
- Percentage: 50.0,
- Value: true,
- },
- },
- },
- },
- },
- },
- {
- name: "import without attachment",
- path: "testdata/import_no_attachment",
- expected: &mockCreator{
- createflagReqs: []*flipt.CreateFlagRequest{
- {
- NamespaceKey: "default",
- Key: "flag1",
- Name: "flag1",
- Description: "description",
- Type: flipt.FlagType_VARIANT_FLAG_TYPE,
- Enabled: true,
- },
- {
- NamespaceKey: "default",
- Key: "flag2",
- Name: "flag2",
- Description: "a boolean flag",
- Type: flipt.FlagType_BOOLEAN_FLAG_TYPE,
- Enabled: false,
- },
- },
- variantReqs: []*flipt.CreateVariantRequest{
- {
- NamespaceKey: "default",
- FlagKey: "flag1",
- Key: "variant1",
- Name: "variant1",
- Description: "variant description",
- },
- },
- segmentReqs: []*flipt.CreateSegmentRequest{
- {
- NamespaceKey: "default",
- Key: "segment1",
- Name: "segment1",
- Description: "description",
- MatchType: flipt.MatchType_ANY_MATCH_TYPE,
- },
- },
- constraintReqs: []*flipt.CreateConstraintRequest{
- {
- NamespaceKey: "default",
- SegmentKey: "segment1",
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "fizz",
- Operator: "neq",
- Value: "buzz",
- },
- },
- ruleReqs: []*flipt.CreateRuleRequest{
- {
- NamespaceKey: "default",
- FlagKey: "flag1",
- SegmentKey: "segment1",
- Rank: 1,
- },
- },
- distributionReqs: []*flipt.CreateDistributionRequest{
- {
- NamespaceKey: "default",
- RuleId: "static_rule_id",
- VariantId: "static_variant_id",
- FlagKey: "flag1",
- Rollout: 100,
- },
- },
- rolloutReqs: []*flipt.CreateRolloutRequest{
- {
- NamespaceKey: "default",
- FlagKey: "flag2",
- Description: "enabled for internal users",
- Rank: 1,
- Rule: &flipt.CreateRolloutRequest_Segment{
- Segment: &flipt.RolloutSegment{
- SegmentKey: "internal_users",
- Value: true,
- },
- },
- },
- {
- NamespaceKey: "default",
- FlagKey: "flag2",
- Description: "enabled for 50%",
- Rank: 2,
- Rule: &flipt.CreateRolloutRequest_Threshold{
- Threshold: &flipt.RolloutThreshold{
- Percentage: 50.0,
- Value: true,
- },
- },
- },
- },
- },
- },
- {
- name: "import with implicit rule ranks",
- path: "testdata/import_implicit_rule_rank",
- expected: &mockCreator{
- createflagReqs: []*flipt.CreateFlagRequest{
- {
- NamespaceKey: "default",
- Key: "flag1",
- Name: "flag1",
- Description: "description",
- Type: flipt.FlagType_VARIANT_FLAG_TYPE,
- Enabled: true,
- },
- {
- NamespaceKey: "default",
- Key: "flag2",
- Name: "flag2",
- Description: "a boolean flag",
- Type: flipt.FlagType_BOOLEAN_FLAG_TYPE,
- Enabled: false,
- },
- },
- variantReqs: []*flipt.CreateVariantRequest{
- {
- NamespaceKey: "default",
- FlagKey: "flag1",
- Key: "variant1",
- Name: "variant1",
- Description: "variant description",
- Attachment: compact(t, variantAttachment),
- },
- },
- segmentReqs: []*flipt.CreateSegmentRequest{
- {
- NamespaceKey: "default",
- Key: "segment1",
- Name: "segment1",
- Description: "description",
- MatchType: flipt.MatchType_ANY_MATCH_TYPE,
- },
- },
- constraintReqs: []*flipt.CreateConstraintRequest{
- {
- NamespaceKey: "default",
- SegmentKey: "segment1",
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "fizz",
- Operator: "neq",
- Value: "buzz",
- },
- },
- ruleReqs: []*flipt.CreateRuleRequest{
- {
- NamespaceKey: "default",
- FlagKey: "flag1",
- SegmentKey: "segment1",
- Rank: 1,
- },
- },
- distributionReqs: []*flipt.CreateDistributionRequest{
- {
- NamespaceKey: "default",
- RuleId: "static_rule_id",
- VariantId: "static_variant_id",
- FlagKey: "flag1",
- Rollout: 100,
- },
- },
- rolloutReqs: []*flipt.CreateRolloutRequest{
- {
- NamespaceKey: "default",
- FlagKey: "flag2",
- Description: "enabled for internal users",
- Rank: 1,
- Rule: &flipt.CreateRolloutRequest_Segment{
- Segment: &flipt.RolloutSegment{
- SegmentKey: "internal_users",
- Value: true,
- },
- },
- },
- {
- NamespaceKey: "default",
- FlagKey: "flag2",
- Description: "enabled for 50%",
- Rank: 2,
- Rule: &flipt.CreateRolloutRequest_Threshold{
- Threshold: &flipt.RolloutThreshold{
- Percentage: 50.0,
- Value: true,
- },
- },
- },
- },
- },
- },
- {
- name: "import with multiple segments",
- path: "testdata/import_rule_multiple_segments",
- expected: &mockCreator{
- createflagReqs: []*flipt.CreateFlagRequest{
- {
- NamespaceKey: "default",
- Key: "flag1",
- Name: "flag1",
- Description: "description",
- Type: flipt.FlagType_VARIANT_FLAG_TYPE,
- Enabled: true,
- },
- {
- NamespaceKey: "default",
- Key: "flag2",
- Name: "flag2",
- Description: "a boolean flag",
- Type: flipt.FlagType_BOOLEAN_FLAG_TYPE,
- Enabled: false,
- },
- },
- variantReqs: []*flipt.CreateVariantRequest{
- {
- NamespaceKey: "default",
- FlagKey: "flag1",
- Key: "variant1",
- Name: "variant1",
- Description: "variant description",
- Attachment: compact(t, variantAttachment),
- },
- },
- segmentReqs: []*flipt.CreateSegmentRequest{
- {
- NamespaceKey: "default",
- Key: "segment1",
- Name: "segment1",
- Description: "description",
- MatchType: flipt.MatchType_ANY_MATCH_TYPE,
- },
- },
- constraintReqs: []*flipt.CreateConstraintRequest{
- {
- NamespaceKey: "default",
- SegmentKey: "segment1",
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "fizz",
- Operator: "neq",
- Value: "buzz",
- },
- },
- ruleReqs: []*flipt.CreateRuleRequest{
- {
- NamespaceKey: "default",
- FlagKey: "flag1",
- SegmentKeys: []string{"segment1"},
- Rank: 1,
- },
- },
- distributionReqs: []*flipt.CreateDistributionRequest{
- {
- NamespaceKey: "default",
- RuleId: "static_rule_id",
- VariantId: "static_variant_id",
- FlagKey: "flag1",
- Rollout: 100,
- },
- },
- rolloutReqs: []*flipt.CreateRolloutRequest{
- {
- NamespaceKey: "default",
- FlagKey: "flag2",
- Description: "enabled for internal users",
- Rank: 1,
- Rule: &flipt.CreateRolloutRequest_Segment{
- Segment: &flipt.RolloutSegment{
- SegmentKey: "internal_users",
- Value: true,
- },
- },
- },
- {
- NamespaceKey: "default",
- FlagKey: "flag2",
- Description: "enabled for 50%",
- Rank: 2,
- Rule: &flipt.CreateRolloutRequest_Threshold{
- Threshold: &flipt.RolloutThreshold{
- Percentage: 50.0,
- Value: true,
- },
- },
- },
- },
- },
- },
- {
- name: "import v1",
- path: "testdata/import_v1",
- expected: &mockCreator{
- createflagReqs: []*flipt.CreateFlagRequest{
- {
- NamespaceKey: "default",
- Key: "flag1",
- Name: "flag1",
- Description: "description",
- Type: flipt.FlagType_VARIANT_FLAG_TYPE,
- Enabled: true,
- },
- },
- variantReqs: []*flipt.CreateVariantRequest{
- {
- NamespaceKey: "default",
- FlagKey: "flag1",
- Key: "variant1",
- Name: "variant1",
- Description: "variant description",
- Attachment: compact(t, variantAttachment),
- },
- },
- segmentReqs: []*flipt.CreateSegmentRequest{
- {
- NamespaceKey: "default",
- Key: "segment1",
- Name: "segment1",
- Description: "description",
- MatchType: flipt.MatchType_ANY_MATCH_TYPE,
- },
- },
- constraintReqs: []*flipt.CreateConstraintRequest{
- {
- NamespaceKey: "default",
- SegmentKey: "segment1",
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "fizz",
- Operator: "neq",
- Value: "buzz",
- },
- },
- ruleReqs: []*flipt.CreateRuleRequest{
- {
- NamespaceKey: "default",
- FlagKey: "flag1",
- SegmentKey: "segment1",
- Rank: 1,
- },
- },
- distributionReqs: []*flipt.CreateDistributionRequest{
- {
- NamespaceKey: "default",
- RuleId: "static_rule_id",
- VariantId: "static_variant_id",
- FlagKey: "flag1",
- Rollout: 100,
- },
- },
- },
- },
- {
- name: "import v1.1",
- path: "testdata/import_v1_1",
- expected: &mockCreator{
- createflagReqs: []*flipt.CreateFlagRequest{
- {
- NamespaceKey: "default",
- Key: "flag1",
- Name: "flag1",
- Description: "description",
- Type: flipt.FlagType_VARIANT_FLAG_TYPE,
- Enabled: true,
- },
- {
- NamespaceKey: "default",
- Key: "flag2",
- Name: "flag2",
- Description: "a boolean flag",
- Type: flipt.FlagType_BOOLEAN_FLAG_TYPE,
- Enabled: false,
- },
- },
- variantReqs: []*flipt.CreateVariantRequest{
- {
- NamespaceKey: "default",
- FlagKey: "flag1",
- Key: "variant1",
- Name: "variant1",
- Description: "variant description",
- Attachment: compact(t, variantAttachment),
- },
- },
- segmentReqs: []*flipt.CreateSegmentRequest{
- {
- NamespaceKey: "default",
- Key: "segment1",
- Name: "segment1",
- Description: "description",
- MatchType: flipt.MatchType_ANY_MATCH_TYPE,
- },
- },
- constraintReqs: []*flipt.CreateConstraintRequest{
- {
- NamespaceKey: "default",
- SegmentKey: "segment1",
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "fizz",
- Operator: "neq",
- Value: "buzz",
- },
- },
- ruleReqs: []*flipt.CreateRuleRequest{
- {
- NamespaceKey: "default",
- FlagKey: "flag1",
- SegmentKey: "segment1",
- Rank: 1,
- },
- },
- distributionReqs: []*flipt.CreateDistributionRequest{
- {
- NamespaceKey: "default",
- RuleId: "static_rule_id",
- VariantId: "static_variant_id",
- FlagKey: "flag1",
- Rollout: 100,
- },
- },
- rolloutReqs: []*flipt.CreateRolloutRequest{
- {
- NamespaceKey: "default",
- FlagKey: "flag2",
- Description: "enabled for internal users",
- Rank: 1,
- Rule: &flipt.CreateRolloutRequest_Segment{
- Segment: &flipt.RolloutSegment{
- SegmentKey: "internal_users",
- Value: true,
- },
- },
- },
- {
- NamespaceKey: "default",
- FlagKey: "flag2",
- Description: "enabled for 50%",
- Rank: 2,
- Rule: &flipt.CreateRolloutRequest_Threshold{
- Threshold: &flipt.RolloutThreshold{
- Percentage: 50.0,
- Value: true,
- },
- },
- },
- },
- },
- },
- {
- name: "import new flags only",
- path: "testdata/import_new_flags_only",
- skipExisting: true,
- creator: func() *mockCreator {
- return &mockCreator{
- listFlagResps: []*flipt.FlagList{{
- Flags: []*flipt.Flag{{
- NamespaceKey: "default",
- Key: "flag1",
- Name: "flag1",
- Description: "description",
- Type: flipt.FlagType_VARIANT_FLAG_TYPE,
- Enabled: true,
- }},
- }},
- listSegmentResps: []*flipt.SegmentList{{
- Segments: []*flipt.Segment{{
- NamespaceKey: "default",
- Key: "segment1",
- Name: "segment1",
- Description: "description",
- MatchType: flipt.MatchType_ANY_MATCH_TYPE,
- }},
- }},
- }
- },
- expected: &mockCreator{
- createflagReqs: []*flipt.CreateFlagRequest{
- {
- NamespaceKey: "default",
- Key: "flag2",
- Name: "flag2",
- Description: "a boolean flag",
- Type: flipt.FlagType_BOOLEAN_FLAG_TYPE,
- Enabled: false,
- },
- },
- variantReqs: nil,
- segmentReqs: []*flipt.CreateSegmentRequest{
- {
- NamespaceKey: "default",
- Key: "segment2",
- Name: "segment2",
- Description: "description",
- MatchType: flipt.MatchType_ANY_MATCH_TYPE,
- },
- },
- constraintReqs: []*flipt.CreateConstraintRequest{
- {
- NamespaceKey: "default",
- SegmentKey: "segment2",
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "buzz",
- Operator: "neq",
- Value: "fizz",
- },
- },
- ruleReqs: nil,
- distributionReqs: nil,
- rolloutReqs: []*flipt.CreateRolloutRequest{
- {
- NamespaceKey: "default",
- FlagKey: "flag2",
- Description: "enabled for internal users",
- Rank: 1,
- Rule: &flipt.CreateRolloutRequest_Segment{
- Segment: &flipt.RolloutSegment{
- SegmentKey: "internal_users",
- Value: true,
- },
- },
- },
- {
- NamespaceKey: "default",
- FlagKey: "flag2",
- Description: "enabled for 50%",
- Rank: 2,
- Rule: &flipt.CreateRolloutRequest_Threshold{
- Threshold: &flipt.RolloutThreshold{
- Percentage: 50.0,
- Value: true,
- },
- },
- },
- },
- listFlagReqs: []*flipt.ListFlagRequest{
- {
- NamespaceKey: "default",
- },
- },
- listFlagResps: []*flipt.FlagList{},
- listSegmentReqs: []*flipt.ListSegmentRequest{
- {
- NamespaceKey: "default",
- },
- },
- listSegmentResps: []*flipt.SegmentList{},
- },
- },
- {
- name: "import v1.3",
- path: "testdata/import_v1_3",
- expected: &mockCreator{
- createflagReqs: []*flipt.CreateFlagRequest{
- {
- NamespaceKey: "default",
- Key: "flag1",
- Name: "flag1",
- Description: "description",
- Type: flipt.FlagType_VARIANT_FLAG_TYPE,
- Enabled: true,
- Metadata: newStruct(t, map[string]any{"label": "variant", "area": true}),
- },
- {
- NamespaceKey: "default",
- Key: "flag2",
- Name: "flag2",
- Description: "a boolean flag",
- Type: flipt.FlagType_BOOLEAN_FLAG_TYPE,
- Enabled: false,
- Metadata: newStruct(t, map[string]any{"label": "bool", "area": 12}),
- },
- },
- variantReqs: []*flipt.CreateVariantRequest{
- {
- NamespaceKey: "default",
- FlagKey: "flag1",
- Key: "variant1",
- Name: "variant1",
- Description: "variant description",
- Attachment: compact(t, variantAttachment),
- },
- },
- segmentReqs: []*flipt.CreateSegmentRequest{
- {
- NamespaceKey: "default",
- Key: "segment1",
- Name: "segment1",
- Description: "description",
- MatchType: flipt.MatchType_ANY_MATCH_TYPE,
- },
- },
- constraintReqs: []*flipt.CreateConstraintRequest{
- {
- NamespaceKey: "default",
- SegmentKey: "segment1",
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "fizz",
- Operator: "neq",
- Value: "buzz",
- },
- },
- ruleReqs: []*flipt.CreateRuleRequest{
- {
- NamespaceKey: "default",
- FlagKey: "flag1",
- SegmentKey: "segment1",
- Rank: 1,
- },
- },
- distributionReqs: []*flipt.CreateDistributionRequest{
- {
- NamespaceKey: "default",
- RuleId: "static_rule_id",
- VariantId: "static_variant_id",
- FlagKey: "flag1",
- Rollout: 100,
- },
- },
- rolloutReqs: []*flipt.CreateRolloutRequest{
- {
- NamespaceKey: "default",
- FlagKey: "flag2",
- Description: "enabled for internal users",
- Rank: 1,
- Rule: &flipt.CreateRolloutRequest_Segment{
- Segment: &flipt.RolloutSegment{
- SegmentKey: "internal_users",
- Value: true,
- },
- },
- },
- {
- NamespaceKey: "default",
- FlagKey: "flag2",
- Description: "enabled for 50%",
- Rank: 2,
- Rule: &flipt.CreateRolloutRequest_Threshold{
- Threshold: &flipt.RolloutThreshold{
- Percentage: 50.0,
- Value: true,
- },
- },
- },
- },
- },
- },
- {
- name: "import with flag complex metadata",
- path: "testdata/import_flag_complex_metadata",
- expected: &mockCreator{
- createflagReqs: []*flipt.CreateFlagRequest{
- {
- NamespaceKey: "default",
- Key: "test",
- Name: "test",
- Type: flipt.FlagType_BOOLEAN_FLAG_TYPE,
- Metadata: newStruct(t, map[string]any{"args": map[string]any{"name": "value"}}),
- },
- },
- },
- },
- }
-
- for _, tc := range tests {
- tc := tc
- for _, ext := range extensions {
- t.Run(fmt.Sprintf("%s (%s)", tc.name, ext), func(t *testing.T) {
- creator := &mockCreator{}
- if tc.creator != nil {
- creator = tc.creator()
- }
- importer := NewImporter(creator)
-
- in, err := os.Open(tc.path + "." + string(ext))
- require.NoError(t, err)
- defer in.Close()
-
- err = importer.Import(context.Background(), ext, in, tc.skipExisting)
- require.NoError(t, err)
-
- assert.Equal(t, tc.expected, creator)
- })
- }
- }
-}
-
-func TestImport_Export(t *testing.T) {
- var (
- creator = &mockCreator{}
- importer = NewImporter(creator)
- )
-
- in, err := os.Open("testdata/export.yml")
- require.NoError(t, err)
- defer in.Close()
-
- err = importer.Import(context.Background(), EncodingYML, in, skipExistingFalse)
- require.NoError(t, err)
- assert.Equal(t, "default", creator.createflagReqs[0].NamespaceKey)
-}
-
-func TestImport_InvalidVersion(t *testing.T) {
- var (
- creator = &mockCreator{}
- importer = NewImporter(creator)
- )
-
- for _, ext := range extensions {
- in, err := os.Open("testdata/import_invalid_version." + string(ext))
- require.NoError(t, err)
- defer in.Close()
-
- err = importer.Import(context.Background(), ext, in, skipExistingFalse)
- assert.EqualError(t, err, "unsupported version: 5.0")
- }
-}
-
-func TestImport_FlagType_LTVersion1_1(t *testing.T) {
- var (
- creator = &mockCreator{}
- importer = NewImporter(creator)
- )
-
- for _, ext := range extensions {
- in, err := os.Open("testdata/import_v1_flag_type_not_supported." + string(ext))
- require.NoError(t, err)
- defer in.Close()
-
- err = importer.Import(context.Background(), ext, in, skipExistingFalse)
- assert.EqualError(t, err, "flag.type is supported in version >=1.1, found 1.0")
- }
-}
-
-func TestImport_Rollouts_LTVersion1_1(t *testing.T) {
- var (
- creator = &mockCreator{}
- importer = NewImporter(creator)
- )
-
- for _, ext := range extensions {
- in, err := os.Open("testdata/import_v1_rollouts_not_supported." + string(ext))
- require.NoError(t, err)
- defer in.Close()
-
- err = importer.Import(context.Background(), ext, in, skipExistingFalse)
- assert.EqualError(t, err, "flag.rollouts is supported in version >=1.1, found 1.0")
- }
-}
-
-func TestImport_Namespaces_Mix_And_Match(t *testing.T) {
- tests := []struct {
- name string
- path string
- expectedGetNSReqs int
- expectedCreateFlagReqs int
- expectedCreateSegmentReqs int
- }{
- {
- name: "single namespace no YAML stream",
- path: "testdata/import",
- expectedGetNSReqs: 0,
- expectedCreateFlagReqs: 2,
- expectedCreateSegmentReqs: 1,
- },
- {
- name: "single namespace foo non-YAML stream",
- path: "testdata/import_single_namespace_foo",
- expectedGetNSReqs: 1,
- expectedCreateFlagReqs: 2,
- expectedCreateSegmentReqs: 1,
- },
- {
- name: "multiple namespaces default and foo",
- path: "testdata/import_two_namespaces_default_and_foo",
- expectedGetNSReqs: 1,
- expectedCreateFlagReqs: 4,
- expectedCreateSegmentReqs: 2,
- },
- {
- name: "yaml stream only default namespace",
- path: "testdata/import_yaml_stream_default_namespace",
- expectedGetNSReqs: 0,
- expectedCreateFlagReqs: 4,
- expectedCreateSegmentReqs: 2,
- },
- {
- name: "yaml stream all unqiue namespaces",
- path: "testdata/import_yaml_stream_all_unique_namespaces",
- expectedGetNSReqs: 2,
- expectedCreateFlagReqs: 6,
- expectedCreateSegmentReqs: 3,
- },
- }
-
- for _, tc := range tests {
- tc := tc
- for _, ext := range extensions {
- t.Run(fmt.Sprintf("%s (%s)", tc.name, ext), func(t *testing.T) {
- var (
- creator = &mockCreator{}
- importer = NewImporter(creator)
- )
-
- in, err := os.Open(tc.path + "." + string(ext))
- require.NoError(t, err)
- defer in.Close()
-
- err = importer.Import(context.Background(), ext, in, skipExistingFalse)
- require.NoError(t, err)
-
- assert.Len(t, creator.getNSReqs, tc.expectedGetNSReqs)
- assert.Len(t, creator.createflagReqs, tc.expectedCreateFlagReqs)
- assert.Len(t, creator.segmentReqs, tc.expectedCreateSegmentReqs)
- })
- }
- }
-}
-
-//nolint:unparam
-func compact(t *testing.T, v string) string {
- t.Helper()
-
- var m any
- require.NoError(t, json.Unmarshal([]byte(v), &m))
-
- d, err := json.Marshal(m)
- require.NoError(t, err)
-
- return string(d)
-}
diff --git a/internal/info/flipt_test.go b/internal/info/flipt_test.go
index e17ba2e19a..8d10191ba5 100644
--- a/internal/info/flipt_test.go
+++ b/internal/info/flipt_test.go
@@ -30,12 +30,12 @@ func TestNew(t *testing.T) {
assert.Equal(t, "amd64", f.Arch)
assert.False(t, f.Authentication.Required)
assert.False(t, f.Analytics.Enabled)
- assert.Equal(t, config.DatabaseStorageType, f.Storage.Type)
+ assert.Equal(t, config.LocalStorageType, f.Storage.Type)
}
func TestHttpHandler(t *testing.T) {
f := New()
- f.Storage.Type = config.DatabaseStorageType
+ f.Storage.Type = config.LocalStorageType
r := httptest.NewRequest("GET", "/info", nil)
w := httptest.NewRecorder()
f.ServeHTTP(w, r)
diff --git a/internal/migrations/README.md b/internal/migrations/README.md
new file mode 100644
index 0000000000..a946a45978
--- /dev/null
+++ b/internal/migrations/README.md
@@ -0,0 +1,21 @@
+# Database Migrations
+
+This directory contains the database migrations for the v2 version of Flipt.
+
+We use [golang-migrate](https://github.com/golang-migrate/migrate) to create and manage the migrations.
+
+Currently the only database supported is Clickhouse for analytics.
+
+To create a new migration, run the following command:
+
+```sh
+migrate create -ext sql -dir ./migrations/{db}
+```
+
+Where `{db}` is the database type, e.g. `clickhouse`, etc.
+
+Example:
+
+```sh
+migrate create -ext sql -dir ./migrations/clickhouse create_table_X
+```
diff --git a/internal/migrations/clickhouse/20250121202120_create_table_counter_analytics.down.sql b/internal/migrations/clickhouse/20250121202120_create_table_counter_analytics.down.sql
new file mode 100644
index 0000000000..12c46349d4
--- /dev/null
+++ b/internal/migrations/clickhouse/20250121202120_create_table_counter_analytics.down.sql
@@ -0,0 +1 @@
+DROP TABLE IF EXISTS flipt_counter_analytics;
\ No newline at end of file
diff --git a/internal/migrations/clickhouse/20250121202120_create_table_counter_analytics.up.sql b/internal/migrations/clickhouse/20250121202120_create_table_counter_analytics.up.sql
new file mode 100644
index 0000000000..f0dc8c444d
--- /dev/null
+++ b/internal/migrations/clickhouse/20250121202120_create_table_counter_analytics.up.sql
@@ -0,0 +1,23 @@
+CREATE TABLE IF NOT EXISTS flipt_counter_analytics (
+ `timestamp` DateTime('UTC'),
+ `analytic_name` String,
+ `namespace_key` String,
+ `flag_key` String,
+ `flag_type` Enum(
+ 'VARIANT_FLAG_TYPE' = 1,
+ 'BOOLEAN_FLAG_TYPE' = 2
+ ),
+ `reason` Enum(
+ 'UNKNOWN_EVALUATION_REASON' = 1,
+ 'FLAG_DISABLED_EVALUATION_REASON' = 2,
+ 'MATCH_EVALUATION_REASON' = 3,
+ 'DEFAULT_EVALUATION_REASON' = 4
+ ),
+ `match` Nullable(Bool),
+ `evaluation_value` Nullable(String),
+ `entity_id` Nullable(String),
+ `value` UInt32
+)
+ENGINE = MergeTree
+ORDER BY timestamp
+TTL timestamp + INTERVAL 1 WEEK;
diff --git a/internal/migrations/clickhouse/20250121202326_create_table_counter_aggregated_analytics.down.sql b/internal/migrations/clickhouse/20250121202326_create_table_counter_aggregated_analytics.down.sql
new file mode 100644
index 0000000000..4aea271274
--- /dev/null
+++ b/internal/migrations/clickhouse/20250121202326_create_table_counter_aggregated_analytics.down.sql
@@ -0,0 +1 @@
+DROP TABLE IF EXISTS flipt_counter_aggregated_analytics;
\ No newline at end of file
diff --git a/config/migrations/clickhouse/1_default_aggregated.up.sql b/internal/migrations/clickhouse/20250121202326_create_table_counter_aggregated_analytics.up.sql
similarity index 76%
rename from config/migrations/clickhouse/1_default_aggregated.up.sql
rename to internal/migrations/clickhouse/20250121202326_create_table_counter_aggregated_analytics.up.sql
index c6a8a09ea5..e5c0e46929 100644
--- a/config/migrations/clickhouse/1_default_aggregated.up.sql
+++ b/internal/migrations/clickhouse/20250121202326_create_table_counter_aggregated_analytics.up.sql
@@ -9,5 +9,12 @@ CREATE TABLE flipt_counter_aggregated_analytics
`value` UInt32
)
ENGINE = SummingMergeTree
-ORDER BY (timestamp, analytic_name, namespace_key, flag_key, reason, evaluation_value)
+ORDER BY (
+ timestamp,
+ analytic_name,
+ namespace_key,
+ flag_key,
+ reason,
+ evaluation_value
+)
TTL timestamp + INTERVAL 1 WEEK;
\ No newline at end of file
diff --git a/internal/migrations/clickhouse/20250121202552_create_view_counter_aggregated_analytics.down.sql b/internal/migrations/clickhouse/20250121202552_create_view_counter_aggregated_analytics.down.sql
new file mode 100644
index 0000000000..c145dacad4
--- /dev/null
+++ b/internal/migrations/clickhouse/20250121202552_create_view_counter_aggregated_analytics.down.sql
@@ -0,0 +1 @@
+DROP VIEW IF EXISTS flipt_counter_aggregated_analytics_mv;
\ No newline at end of file
diff --git a/config/migrations/clickhouse/2_default_aggregated_view.up.sql b/internal/migrations/clickhouse/20250121202552_create_view_counter_aggregated_analytics.up.sql
similarity index 100%
rename from config/migrations/clickhouse/2_default_aggregated_view.up.sql
rename to internal/migrations/clickhouse/20250121202552_create_view_counter_aggregated_analytics.up.sql
diff --git a/config/migrations/migrations.go b/internal/migrations/migrations.go
similarity index 100%
rename from config/migrations/migrations.go
rename to internal/migrations/migrations.go
diff --git a/internal/oci/ecr/credentials_store.go b/internal/oci/ecr/credentials_store.go
deleted file mode 100644
index 84f34efcc7..0000000000
--- a/internal/oci/ecr/credentials_store.go
+++ /dev/null
@@ -1,77 +0,0 @@
-package ecr
-
-import (
- "context"
- "encoding/base64"
- "strings"
- "sync"
- "time"
-
- "oras.land/oras-go/v2/registry/remote/auth"
-)
-
-func defaultClientFunc(endpoint string) func(serverAddress string) Client {
- return func(serverAddress string) Client {
- switch {
- case strings.HasPrefix(serverAddress, "public.ecr.aws"):
- return NewPublicClient(endpoint)
- default:
- return NewPrivateClient(endpoint)
- }
- }
-}
-
-type cacheItem struct {
- credential auth.Credential
- expiresAt time.Time
-}
-
-func NewCredentialsStore(endpoint string) *CredentialsStore {
- return &CredentialsStore{
- cache: map[string]cacheItem{},
- clientFunc: defaultClientFunc(endpoint),
- }
-}
-
-type CredentialsStore struct {
- mu sync.Mutex
- cache map[string]cacheItem
- clientFunc func(serverAddress string) Client
-}
-
-// Get retrieves credentials from the store for the given server address.
-func (s *CredentialsStore) Get(ctx context.Context, serverAddress string) (auth.Credential, error) {
- s.mu.Lock()
- defer s.mu.Unlock()
- if item, ok := s.cache[serverAddress]; ok && time.Now().UTC().Before(item.expiresAt) {
- return item.credential, nil
- }
-
- token, expiresAt, err := s.clientFunc(serverAddress).GetAuthorizationToken(ctx)
- if err != nil {
- return auth.EmptyCredential, err
- }
- credential, err := s.extractCredential(token)
- if err != nil {
- return auth.EmptyCredential, err
- }
- s.cache[serverAddress] = cacheItem{credential, expiresAt}
- return credential, nil
-}
-
-func (s *CredentialsStore) extractCredential(token string) (auth.Credential, error) {
- output, err := base64.StdEncoding.DecodeString(token)
- if err != nil {
- return auth.EmptyCredential, err
- }
-
- userpass := strings.SplitN(string(output), ":", 2)
- if len(userpass) != 2 {
- return auth.EmptyCredential, auth.ErrBasicCredentialNotFound
- }
-
- return auth.Credential{
- Username: userpass[0],
- Password: userpass[1],
- }, nil
-}
diff --git a/internal/oci/ecr/credentials_store_test.go b/internal/oci/ecr/credentials_store_test.go
deleted file mode 100644
index 7a4ff2da89..0000000000
--- a/internal/oci/ecr/credentials_store_test.go
+++ /dev/null
@@ -1,93 +0,0 @@
-package ecr
-
-import (
- "context"
- "encoding/base64"
- "io"
- "testing"
- "time"
-
- "github.com/stretchr/testify/assert"
- "github.com/stretchr/testify/mock"
- "github.com/stretchr/testify/require"
- "oras.land/oras-go/v2/registry/remote/auth"
-)
-
-func TestDefaultClientFunc(t *testing.T) {
- require.IsType(t, &privateClient{}, defaultClientFunc("")("aws_account_id.dkr.ecr.region.amazonaws.com/team-a/app"))
- require.IsType(t, &publicClient{}, defaultClientFunc("")("public.ecr.aws/team-a/app"))
-}
-
-func TestECRCredential(t *testing.T) {
- for _, tt := range []struct {
- name string
- token string
- username string
- password string
- err error
- }{
- {
- name: "invalid base64 token",
- token: "invalid",
- err: base64.CorruptInputError(4),
- },
- {
- name: "invalid format token",
- token: "dXNlcl9uYW1lcGFzc3dvcmQ=",
- err: auth.ErrBasicCredentialNotFound,
- },
- {
- name: "valid token",
- token: "dXNlcl9uYW1lOnBhc3N3b3Jk",
- username: "user_name",
- password: "password",
- },
- } {
- t.Run(tt.name, func(t *testing.T) {
- r := &CredentialsStore{}
- credential, err := r.extractCredential(tt.token)
- assert.Equal(t, tt.err, err)
- assert.Equal(t, tt.username, credential.Username)
- assert.Equal(t, tt.password, credential.Password)
- })
- }
-}
-
-func TestCredential(t *testing.T) {
- t.Run("on error", func(t *testing.T) {
- m := NewMockClient(t)
- m.On("GetAuthorizationToken", mock.Anything).Return("", time.Time{}, io.ErrUnexpectedEOF)
- r := &CredentialsStore{
- cache: map[string]cacheItem{},
- clientFunc: func(serverAddress string) Client { return m },
- }
- _, err := r.Get(context.Background(), "")
- assert.ErrorIs(t, err, io.ErrUnexpectedEOF)
- })
-
- t.Run("on extract failure", func(t *testing.T) {
- m := NewMockClient(t)
- m.On("GetAuthorizationToken", mock.Anything).Return("failure", time.Time{}, nil)
- r := &CredentialsStore{
- cache: map[string]cacheItem{},
- clientFunc: func(serverAddress string) Client { return m },
- }
- _, err := r.Get(context.Background(), "")
- assert.Error(t, err)
- })
-
- t.Run("on success with cached", func(t *testing.T) {
- m := NewMockClient(t)
- m.On("GetAuthorizationToken", mock.Anything).Return("dXNlcl9uYW1lOnBhc3N3b3Jk", time.Now().Add(time.Minute), nil).Once()
- r := &CredentialsStore{
- cache: map[string]cacheItem{},
- clientFunc: func(serverAddress string) Client { return m },
- }
- for i := 0; i < 3; i++ {
- credential, err := r.Get(context.Background(), "")
- require.NoError(t, err)
- assert.Equal(t, "user_name", credential.Username)
- assert.Equal(t, "password", credential.Password)
- }
- })
-}
diff --git a/internal/oci/ecr/ecr.go b/internal/oci/ecr/ecr.go
deleted file mode 100644
index 6c56ee8c6d..0000000000
--- a/internal/oci/ecr/ecr.go
+++ /dev/null
@@ -1,113 +0,0 @@
-package ecr
-
-import (
- "context"
- "errors"
- "time"
-
- "github.com/aws/aws-sdk-go-v2/config"
- "github.com/aws/aws-sdk-go-v2/service/ecr"
- "github.com/aws/aws-sdk-go-v2/service/ecrpublic"
- "oras.land/oras-go/v2/registry/remote/auth"
-)
-
-var ErrNoAWSECRAuthorizationData = errors.New("no ecr authorization data provided")
-
-// Credential returns a Credential() function that can be used by auth.Client.
-func Credential(store *CredentialsStore) auth.CredentialFunc {
- return func(ctx context.Context, hostport string) (auth.Credential, error) {
- return store.Get(ctx, hostport)
- }
-}
-
-// PrivateClient interface defines methods for interacting with a private Amazon ECR registry
-type PrivateClient interface {
- // GetAuthorizationToken retrieves an authorization token for accessing a private ECR registry
- GetAuthorizationToken(ctx context.Context, params *ecr.GetAuthorizationTokenInput, optFns ...func(*ecr.Options)) (*ecr.GetAuthorizationTokenOutput, error)
-}
-
-// PublicClient interface defines methods for interacting with a public Amazon ECR registry
-type PublicClient interface {
- // GetAuthorizationToken retrieves an authorization token for accessing a public ECR registry
- GetAuthorizationToken(ctx context.Context, params *ecrpublic.GetAuthorizationTokenInput, optFns ...func(*ecrpublic.Options)) (*ecrpublic.GetAuthorizationTokenOutput, error)
-}
-
-// Client interface defines a generic method for getting an authorization token.
-// This interface will be implemented by PrivateClient and PublicClient
-type Client interface {
- // GetAuthorizationToken retrieves an authorization token for accessing an ECR registry (private or public)
- GetAuthorizationToken(ctx context.Context) (string, time.Time, error)
-}
-
-func NewPublicClient(endpoint string) Client {
- return &publicClient{endpoint: endpoint}
-}
-
-type publicClient struct {
- client PublicClient
- endpoint string
-}
-
-func (r *publicClient) GetAuthorizationToken(ctx context.Context) (string, time.Time, error) {
- client := r.client
- if client == nil {
- cfg, err := config.LoadDefaultConfig(context.Background())
- if err != nil {
- return "", time.Time{}, err
- }
- client = ecrpublic.NewFromConfig(cfg, func(o *ecrpublic.Options) {
- if r.endpoint != "" {
- o.BaseEndpoint = &r.endpoint
- }
- })
- }
- response, err := client.GetAuthorizationToken(ctx, &ecrpublic.GetAuthorizationTokenInput{})
- if err != nil {
- return "", time.Time{}, err
- }
- authData := response.AuthorizationData
- if authData == nil {
- return "", time.Time{}, ErrNoAWSECRAuthorizationData
- }
- if authData.AuthorizationToken == nil {
- return "", time.Time{}, auth.ErrBasicCredentialNotFound
- }
- return *authData.AuthorizationToken, *authData.ExpiresAt, nil
-}
-
-func NewPrivateClient(endpoint string) Client {
- return &privateClient{endpoint: endpoint}
-}
-
-type privateClient struct {
- client PrivateClient
- endpoint string
-}
-
-func (r *privateClient) GetAuthorizationToken(ctx context.Context) (string, time.Time, error) {
- client := r.client
- if client == nil {
- cfg, err := config.LoadDefaultConfig(ctx)
- if err != nil {
- return "", time.Time{}, err
- }
- client = ecr.NewFromConfig(cfg, func(o *ecr.Options) {
- if r.endpoint != "" {
- o.BaseEndpoint = &r.endpoint
- }
- })
- }
- response, err := client.GetAuthorizationToken(ctx, &ecr.GetAuthorizationTokenInput{})
- if err != nil {
- return "", time.Time{}, err
- }
- if len(response.AuthorizationData) == 0 {
- return "", time.Time{}, ErrNoAWSECRAuthorizationData
- }
- authData := response.AuthorizationData[0]
-
- if authData.AuthorizationToken == nil {
- return "", time.Time{}, auth.ErrBasicCredentialNotFound
- }
- return *authData.AuthorizationToken, *authData.ExpiresAt, nil
-}
diff --git a/internal/oci/ecr/ecr_test.go b/internal/oci/ecr/ecr_test.go
deleted file mode 100644
index 0b04b10e69..0000000000
--- a/internal/oci/ecr/ecr_test.go
+++ /dev/null
@@ -1,105 +0,0 @@
-package ecr
-
-import (
- "context"
- "errors"
- "testing"
- "time"
-
- "github.com/aws/aws-sdk-go-v2/service/ecr"
- "github.com/aws/aws-sdk-go-v2/service/ecr/types"
- "github.com/aws/aws-sdk-go-v2/service/ecrpublic"
- ptypes "github.com/aws/aws-sdk-go-v2/service/ecrpublic/types"
- "github.com/stretchr/testify/mock"
- "github.com/stretchr/testify/require"
- "oras.land/oras-go/v2/registry/remote/auth"
-)
-
-func TestPrivateClient(t *testing.T) {
- t.Run("on error", func(t *testing.T) {
- m := NewMockPrivateClient(t)
- m.On("GetAuthorizationToken", mock.Anything, mock.Anything).Return(nil, errors.ErrUnsupported)
- client := privateClient{client: m}
- _, _, err := client.GetAuthorizationToken(context.Background())
- require.ErrorIs(t, err, errors.ErrUnsupported)
- })
- t.Run("empty auth", func(t *testing.T) {
- m := NewMockPrivateClient(t)
- m.On("GetAuthorizationToken", mock.Anything, mock.Anything).Return(&ecr.GetAuthorizationTokenOutput{
- AuthorizationData: []types.AuthorizationData{},
- }, nil)
- client := privateClient{client: m}
- _, _, err := client.GetAuthorizationToken(context.Background())
- require.ErrorIs(t, err, ErrNoAWSECRAuthorizationData)
- })
- t.Run("nil auth token", func(t *testing.T) {
- m := NewMockPrivateClient(t)
- m.On("GetAuthorizationToken", mock.Anything, mock.Anything).Return(&ecr.GetAuthorizationTokenOutput{
- AuthorizationData: []types.AuthorizationData{
- {AuthorizationToken: nil},
- },
- }, nil)
- client := privateClient{client: m}
- _, _, err := client.GetAuthorizationToken(context.Background())
- require.ErrorIs(t, err, auth.ErrBasicCredentialNotFound)
- })
- t.Run("get auth token", func(t *testing.T) {
- wantExpiresAt := time.Now()
- wantToken := "some:token"
- m := NewMockPrivateClient(t)
- m.On("GetAuthorizationToken", mock.Anything, mock.Anything).Return(&ecr.GetAuthorizationTokenOutput{
- AuthorizationData: []types.AuthorizationData{
- {AuthorizationToken: &wantToken, ExpiresAt: &wantExpiresAt},
- },
- }, nil)
- client := privateClient{client: m}
- token, expiresAt, err := client.GetAuthorizationToken(context.Background())
- require.NoError(t, err)
- require.Equal(t, wantToken, token)
- require.Equal(t, wantExpiresAt, expiresAt)
- })
- t.Run("default client", func(t *testing.T) {
- client := privateClient{}
- _, _, err := client.GetAuthorizationToken(context.Background())
- require.Error(t, err)
- })
-}
-
-func TestPublicClient(t *testing.T) {
- t.Run("on error", func(t *testing.T) {
- m := NewMockPublicClient(t)
- m.On("GetAuthorizationToken", mock.Anything, mock.Anything).Return(nil, errors.ErrUnsupported)
- client := publicClient{client: m}
- _, _, err := client.GetAuthorizationToken(context.Background())
- require.ErrorIs(t, err, errors.ErrUnsupported)
- })
- t.Run("nil auth token", func(t *testing.T) {
- m := NewMockPublicClient(t)
- m.On("GetAuthorizationToken", mock.Anything, mock.Anything).Return(&ecrpublic.GetAuthorizationTokenOutput{
- AuthorizationData: &ptypes.AuthorizationData{AuthorizationToken: nil},
- }, nil)
- client := publicClient{client: m}
- _, _, err := client.GetAuthorizationToken(context.Background())
- require.ErrorIs(t, err, auth.ErrBasicCredentialNotFound)
- })
- t.Run("get auth token", func(t *testing.T) {
- wantExpiresAt := time.Now()
- wantToken := "some:token"
- m := NewMockPublicClient(t)
- m.On("GetAuthorizationToken", mock.Anything, mock.Anything).Return(&ecrpublic.GetAuthorizationTokenOutput{
- AuthorizationData: &ptypes.AuthorizationData{
- AuthorizationToken: &wantToken, ExpiresAt: &wantExpiresAt,
- },
- }, nil)
- client := publicClient{client: m}
- token, expiresAt, err := client.GetAuthorizationToken(context.Background())
- require.NoError(t, err)
- require.Equal(t, wantToken, token)
- require.Equal(t, wantExpiresAt, expiresAt)
- })
- t.Run("default client", func(t *testing.T) {
- client := publicClient{}
- _, _, err := client.GetAuthorizationToken(context.Background())
- require.Error(t, err)
- })
-}
diff --git a/internal/oci/ecr/mock_Client_test.go b/internal/oci/ecr/mock_Client_test.go
deleted file mode 100644
index cce10cf816..0000000000
--- a/internal/oci/ecr/mock_Client_test.go
+++ /dev/null
@@ -1,64 +0,0 @@
-// Code generated by mockery v2.42.1. DO NOT EDIT.
-
-package ecr
-
-import (
- context "context"
- time "time"
-
- mock "github.com/stretchr/testify/mock"
-)
-
-// MockClient is an autogenerated mock type for the Client type
-type MockClient struct {
- mock.Mock
-}
-
-// GetAuthorizationToken provides a mock function with given fields: ctx
-func (_m *MockClient) GetAuthorizationToken(ctx context.Context) (string, time.Time, error) {
- ret := _m.Called(ctx)
-
- if len(ret) == 0 {
- panic("no return value specified for GetAuthorizationToken")
- }
-
- var r0 string
- var r1 time.Time
- var r2 error
- if rf, ok := ret.Get(0).(func(context.Context) (string, time.Time, error)); ok {
- return rf(ctx)
- }
- if rf, ok := ret.Get(0).(func(context.Context) string); ok {
- r0 = rf(ctx)
- } else {
- r0 = ret.Get(0).(string)
- }
-
- if rf, ok := ret.Get(1).(func(context.Context) time.Time); ok {
- r1 = rf(ctx)
- } else {
- r1 = ret.Get(1).(time.Time)
- }
-
- if rf, ok := ret.Get(2).(func(context.Context) error); ok {
- r2 = rf(ctx)
- } else {
- r2 = ret.Error(2)
- }
-
- return r0, r1, r2
-}
-
-// NewMockClient creates a new instance of MockClient. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
-// The first argument is typically a *testing.T value.
-func NewMockClient(t interface {
- mock.TestingT
- Cleanup(func())
-}) *MockClient {
- mock := &MockClient{}
- mock.Mock.Test(t)
-
- t.Cleanup(func() { mock.AssertExpectations(t) })
-
- return mock
-}
diff --git a/internal/oci/ecr/mock_PrivateClient_test.go b/internal/oci/ecr/mock_PrivateClient_test.go
deleted file mode 100644
index e4b8f22fdf..0000000000
--- a/internal/oci/ecr/mock_PrivateClient_test.go
+++ /dev/null
@@ -1,66 +0,0 @@
-// Code generated by mockery v2.42.1. DO NOT EDIT.
-
-package ecr
-
-import (
- context "context"
-
- serviceecr "github.com/aws/aws-sdk-go-v2/service/ecr"
- mock "github.com/stretchr/testify/mock"
-)
-
-// MockPrivateClient is an autogenerated mock type for the PrivateClient type
-type MockPrivateClient struct {
- mock.Mock
-}
-
-// GetAuthorizationToken provides a mock function with given fields: ctx, params, optFns
-func (_m *MockPrivateClient) GetAuthorizationToken(ctx context.Context, params *serviceecr.GetAuthorizationTokenInput, optFns ...func(*serviceecr.Options)) (*serviceecr.GetAuthorizationTokenOutput, error) {
- _va := make([]interface{}, len(optFns))
- for _i := range optFns {
- _va[_i] = optFns[_i]
- }
- var _ca []interface{}
- _ca = append(_ca, ctx, params)
- _ca = append(_ca, _va...)
- ret := _m.Called(_ca...)
-
- if len(ret) == 0 {
- panic("no return value specified for GetAuthorizationToken")
- }
-
- var r0 *serviceecr.GetAuthorizationTokenOutput
- var r1 error
- if rf, ok := ret.Get(0).(func(context.Context, *serviceecr.GetAuthorizationTokenInput, ...func(*serviceecr.Options)) (*serviceecr.GetAuthorizationTokenOutput, error)); ok {
- return rf(ctx, params, optFns...)
- }
- if rf, ok := ret.Get(0).(func(context.Context, *serviceecr.GetAuthorizationTokenInput, ...func(*serviceecr.Options)) *serviceecr.GetAuthorizationTokenOutput); ok {
- r0 = rf(ctx, params, optFns...)
- } else {
- if ret.Get(0) != nil {
- r0 = ret.Get(0).(*serviceecr.GetAuthorizationTokenOutput)
- }
- }
-
- if rf, ok := ret.Get(1).(func(context.Context, *serviceecr.GetAuthorizationTokenInput, ...func(*serviceecr.Options)) error); ok {
- r1 = rf(ctx, params, optFns...)
- } else {
- r1 = ret.Error(1)
- }
-
- return r0, r1
-}
-
-// NewMockPrivateClient creates a new instance of MockPrivateClient. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
-// The first argument is typically a *testing.T value.
-func NewMockPrivateClient(t interface {
- mock.TestingT
- Cleanup(func())
-}) *MockPrivateClient {
- mock := &MockPrivateClient{}
- mock.Mock.Test(t)
-
- t.Cleanup(func() { mock.AssertExpectations(t) })
-
- return mock
-}
diff --git a/internal/oci/ecr/mock_PublicClient_test.go b/internal/oci/ecr/mock_PublicClient_test.go
deleted file mode 100644
index 67c597d2d8..0000000000
--- a/internal/oci/ecr/mock_PublicClient_test.go
+++ /dev/null
@@ -1,66 +0,0 @@
-// Code generated by mockery v2.42.1. DO NOT EDIT.
-
-package ecr
-
-import (
- context "context"
-
- ecrpublic "github.com/aws/aws-sdk-go-v2/service/ecrpublic"
- mock "github.com/stretchr/testify/mock"
-)
-
-// MockPublicClient is an autogenerated mock type for the PublicClient type
-type MockPublicClient struct {
- mock.Mock
-}
-
-// GetAuthorizationToken provides a mock function with given fields: ctx, params, optFns
-func (_m *MockPublicClient) GetAuthorizationToken(ctx context.Context, params *ecrpublic.GetAuthorizationTokenInput, optFns ...func(*ecrpublic.Options)) (*ecrpublic.GetAuthorizationTokenOutput, error) {
- _va := make([]interface{}, len(optFns))
- for _i := range optFns {
- _va[_i] = optFns[_i]
- }
- var _ca []interface{}
- _ca = append(_ca, ctx, params)
- _ca = append(_ca, _va...)
- ret := _m.Called(_ca...)
-
- if len(ret) == 0 {
- panic("no return value specified for GetAuthorizationToken")
- }
-
- var r0 *ecrpublic.GetAuthorizationTokenOutput
- var r1 error
- if rf, ok := ret.Get(0).(func(context.Context, *ecrpublic.GetAuthorizationTokenInput, ...func(*ecrpublic.Options)) (*ecrpublic.GetAuthorizationTokenOutput, error)); ok {
- return rf(ctx, params, optFns...)
- }
- if rf, ok := ret.Get(0).(func(context.Context, *ecrpublic.GetAuthorizationTokenInput, ...func(*ecrpublic.Options)) *ecrpublic.GetAuthorizationTokenOutput); ok {
- r0 = rf(ctx, params, optFns...)
- } else {
- if ret.Get(0) != nil {
- r0 = ret.Get(0).(*ecrpublic.GetAuthorizationTokenOutput)
- }
- }
-
- if rf, ok := ret.Get(1).(func(context.Context, *ecrpublic.GetAuthorizationTokenInput, ...func(*ecrpublic.Options)) error); ok {
- r1 = rf(ctx, params, optFns...)
- } else {
- r1 = ret.Error(1)
- }
-
- return r0, r1
-}
-
-// NewMockPublicClient creates a new instance of MockPublicClient. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
-// The first argument is typically a *testing.T value.
-func NewMockPublicClient(t interface {
- mock.TestingT
- Cleanup(func())
-}) *MockPublicClient {
- mock := &MockPublicClient{}
- mock.Mock.Test(t)
-
- t.Cleanup(func() { mock.AssertExpectations(t) })
-
- return mock
-}
diff --git a/internal/oci/file.go b/internal/oci/file.go
deleted file mode 100644
index b7948c33e1..0000000000
--- a/internal/oci/file.go
+++ /dev/null
@@ -1,534 +0,0 @@
-package oci
-
-import (
- "bytes"
- "context"
- "encoding/json"
- "errors"
- "fmt"
- "io"
- "io/fs"
- "os"
- "path"
- "path/filepath"
- "strings"
- "time"
-
- "github.com/opencontainers/go-digest"
- v1 "github.com/opencontainers/image-spec/specs-go/v1"
- "go.flipt.io/flipt/internal/containers"
- "go.flipt.io/flipt/internal/ext"
- "go.flipt.io/flipt/internal/storage"
- storagefs "go.flipt.io/flipt/internal/storage/fs"
- "go.uber.org/zap"
- "oras.land/oras-go/v2"
- "oras.land/oras-go/v2/content"
- "oras.land/oras-go/v2/content/memory"
- "oras.land/oras-go/v2/content/oci"
- "oras.land/oras-go/v2/errdef"
- "oras.land/oras-go/v2/registry"
- "oras.land/oras-go/v2/registry/remote"
- "oras.land/oras-go/v2/registry/remote/auth"
- "oras.land/oras-go/v2/registry/remote/retry"
-)
-
-const (
- SchemeHTTP = "http"
- SchemeHTTPS = "https"
- SchemeFlipt = "flipt"
-)
-
-type credentialFunc func(registry string) auth.CredentialFunc
-
-// Store is a type which can retrieve Flipt feature files from a target repository and reference
-// Repositories can be local (OCI layout directories on the filesystem) or a remote registry
-type Store struct {
- opts StoreOptions
- logger *zap.Logger
- local oras.Target
-}
-
-// NewStore constructs and configures an instance of *Store for the provided config
-func NewStore(logger *zap.Logger, dir string, opts ...containers.Option[StoreOptions]) (*Store, error) {
- store := &Store{
- opts: StoreOptions{
- bundleDir: dir,
- manifestVersion: oras.PackManifestVersion1_1,
- },
- logger: logger,
- local: memory.New(),
- }
-
- containers.ApplyAll(&store.opts, opts...)
-
- return store, nil
-}
-
-type Reference struct {
- registry.Reference
- Scheme string
-}
-
-func ParseReference(repository string) (Reference, error) {
- scheme, repository, match := strings.Cut(repository, "://")
- // support empty scheme as remote and https
- if !match {
- repository = scheme
- scheme = SchemeHTTPS
- }
-
- if !strings.Contains(repository, "/") {
- repository = "local/" + repository
- scheme = SchemeFlipt
- }
-
- ref, err := registry.ParseReference(repository)
- if err != nil {
- return Reference{}, err
- }
-
- switch scheme {
- case SchemeHTTP, SchemeHTTPS:
- case SchemeFlipt:
- if ref.Registry != "local" {
- return Reference{}, fmt.Errorf("unexpected local reference: %q", ref)
- }
- default:
- return Reference{}, fmt.Errorf("unexpected repository scheme: %q should be one of [http|https|flipt]", scheme)
- }
-
- return Reference{
- Reference: ref,
- Scheme: scheme,
- }, nil
-}
-
-func (s *Store) getTarget(ref Reference) (oras.Target, error) {
- switch ref.Scheme {
- case SchemeHTTP, SchemeHTTPS:
- remote, err := remote.NewRepository(fmt.Sprintf("%s/%s", ref.Registry, ref.Repository))
- if err != nil {
- return nil, err
- }
-
- remote.PlainHTTP = ref.Scheme == "http"
-
- if s.opts.auth != nil {
- remote.Client = &auth.Client{
- Credential: s.opts.auth(ref.Registry),
- Cache: s.opts.authCache,
- Client: retry.DefaultClient,
- }
- }
-
- return remote, nil
- case SchemeFlipt:
- // build the store once to ensure it is valid
- store, err := oci.New(path.Join(s.opts.bundleDir, ref.Repository))
- if err != nil {
- return nil, err
- }
-
- store.AutoSaveIndex = true
-
- return store, nil
- }
-
- return nil, fmt.Errorf("unexpected repository scheme: %q should be one of [http|https|flipt]", ref.Scheme)
-}
-
-// FetchOptions configures a call to Fetch
-type FetchOptions struct {
- IfNoMatch digest.Digest
-}
-
-// FetchResponse contains any fetched files for the given tracked reference
-// If Matched == true, then the supplied IfNoMatch digest matched and Files should be nil
-type FetchResponse struct {
- Digest digest.Digest
- Files []fs.File
- Matched bool
-}
-
-// IfNoMatch configures the call to Fetch to return early if the supplied
-// digest matches the target manifest pointed at by the underlying reference
-// This is a cache optimization to skip re-fetching resources if the contents
-// has already been seen by the caller
-func IfNoMatch(digest digest.Digest) containers.Option[FetchOptions] {
- return func(fo *FetchOptions) {
- fo.IfNoMatch = digest
- }
-}
-
-// Fetch retrieves the associated files for the tracked repository and reference
-// It can optionally be configured to skip fetching given the caller has a digest
-// that matches the current reference target
-func (s *Store) Fetch(ctx context.Context, ref Reference, opts ...containers.Option[FetchOptions]) (*FetchResponse, error) {
- var options FetchOptions
- containers.ApplyAll(&options, opts...)
-
- store, err := s.getTarget(ref)
- if err != nil {
- return nil, err
- }
-
- desc, err := oras.Copy(ctx,
- store,
- ref.Reference.Reference,
- s.local,
- ref.Reference.Reference,
- oras.DefaultCopyOptions)
- if err != nil {
- return nil, err
- }
-
- bytes, err := content.FetchAll(ctx, s.local, desc)
- if err != nil {
- return nil, err
- }
-
- var manifest v1.Manifest
- if err = json.Unmarshal(bytes, &manifest); err != nil {
- return nil, err
- }
-
- var d digest.Digest
- {
- // shadow manifest so that we can safely
- // strip annotations before calculating
- // the digest
- manifest := manifest
- manifest.Annotations = map[string]string{}
- bytes, err := json.Marshal(&manifest)
- if err != nil {
- return nil, err
- }
-
- d = digest.FromBytes(bytes)
- if d == options.IfNoMatch {
- return &FetchResponse{Matched: true, Digest: d}, nil
- }
- }
-
- files, err := s.fetchFiles(ctx, store, manifest)
- if err != nil {
- return nil, err
- }
-
- return &FetchResponse{Files: files, Digest: d}, nil
-}
-
-// fetchFiles retrieves the associated flipt feature content files from the content fetcher.
-// It traverses the provided manifests and returns a slice of file instances with appropriate
-// content type extensions.
-func (s *Store) fetchFiles(ctx context.Context, store oras.ReadOnlyTarget, manifest v1.Manifest) ([]fs.File, error) {
- var files []fs.File
-
- created, err := time.Parse(time.RFC3339, manifest.Annotations[v1.AnnotationCreated])
- if err != nil {
- return nil, err
- }
-
- for _, layer := range manifest.Layers {
- mediaType, encoding, err := getMediaTypeAndEncoding(layer)
- if err != nil {
- return nil, fmt.Errorf("layer %q: %w", layer.Digest, err)
- }
-
- if mediaType != MediaTypeFliptNamespace {
- return nil, fmt.Errorf("layer %q: type %q: %w", layer.Digest, mediaType, ErrUnexpectedMediaType)
- }
-
- switch encoding {
- case "", "json", "yaml", "yml":
- default:
- return nil, fmt.Errorf("layer %q: unexpected layer encoding: %q", layer.Digest, encoding)
- }
-
- rc, err := store.Fetch(ctx, layer)
- if err != nil {
- return nil, err
- }
-
- files = append(files, &File{
- ReadCloser: rc,
- info: FileInfo{
- desc: layer,
- encoding: encoding,
- mod: created,
- },
- })
- }
-
- return files, nil
-}
-
-// Bundle is a record of an existing Flipt feature bundle
-type Bundle struct {
- Digest digest.Digest
- Repository string
- Tag string
- CreatedAt time.Time
-}
-
-// List returns a slice of bundles available on the host
-func (s *Store) List(ctx context.Context) (bundles []Bundle, _ error) {
- fi, err := os.Open(s.opts.bundleDir)
- if err != nil {
- return nil, err
- }
-
- defer fi.Close()
-
- entries, err := fi.ReadDir(-1)
- if err != nil {
- return nil, err
- }
-
- for _, entry := range entries {
- bytes, err := os.ReadFile(filepath.Join(s.opts.bundleDir, entry.Name(), v1.ImageIndexFile))
- if err != nil {
- if errors.Is(err, os.ErrNotExist) {
- return nil, nil
- }
-
- return nil, err
- }
-
- var index v1.Index
- if err := json.Unmarshal(bytes, &index); err != nil {
- return nil, err
- }
-
- for _, manifest := range index.Manifests {
- digest := manifest.Digest
- path := filepath.Join(s.opts.bundleDir, entry.Name(), "blobs", digest.Algorithm().String(), digest.Hex())
- bytes, err := os.ReadFile(path)
- if err != nil {
- return nil, err
- }
-
- var man v1.Manifest
- if err := json.Unmarshal(bytes, &man); err != nil {
- return nil, err
- }
-
- bundle := Bundle{
- Digest: manifest.Digest,
- Repository: entry.Name(),
- Tag: manifest.Annotations[v1.AnnotationRefName],
- }
-
- bundle.CreatedAt, err = parseCreated(man.Annotations)
- if err != nil {
- return nil, err
- }
-
- bundles = append(bundles, bundle)
- }
- }
-
- return
-}
-
-// Build bundles the target directory Flipt feature state into the target configured on the Store
-// It returns a Bundle which contains metadata regarding the resulting bundle details
-func (s *Store) Build(ctx context.Context, src fs.FS, ref Reference) (Bundle, error) {
- store, err := s.getTarget(ref)
- if err != nil {
- return Bundle{}, err
- }
-
- layers, err := s.buildLayers(ctx, store, src)
- if err != nil {
- return Bundle{}, err
- }
-
- desc, err := oras.PackManifest(ctx, store, s.opts.manifestVersion, MediaTypeFliptFeatures, oras.PackManifestOptions{
- ManifestAnnotations: map[string]string{},
- Layers: layers,
- })
- if err != nil {
- return Bundle{}, err
- }
-
- if ref.Reference.Reference != "" {
- if err := store.Tag(ctx, desc, ref.Reference.Reference); err != nil {
- return Bundle{}, err
- }
- }
-
- bundle := Bundle{
- Digest: desc.Digest,
- Repository: ref.Repository,
- Tag: ref.Reference.Reference,
- }
-
- bundle.CreatedAt, err = parseCreated(desc.Annotations)
- if err != nil {
- return Bundle{}, err
- }
-
- return bundle, nil
-}
-
-func (s *Store) buildLayers(ctx context.Context, store oras.Target, src fs.FS) (layers []v1.Descriptor, _ error) {
- if err := storagefs.WalkDocuments(s.logger, src, func(doc *ext.Document) error {
- payload, err := json.Marshal(&doc)
- if err != nil {
- return err
- }
-
- var namespaceKey string
- if doc.Namespace == nil {
- namespaceKey = storage.DefaultNamespace
- } else {
- namespaceKey = doc.Namespace.GetKey()
- }
-
- desc := v1.Descriptor{
- Digest: digest.FromBytes(payload),
- Size: int64(len(payload)),
- MediaType: MediaTypeFliptNamespace,
- Annotations: map[string]string{
- AnnotationFliptNamespace: namespaceKey,
- },
- }
-
- s.logger.Debug("adding layer", zap.String("digest", desc.Digest.Hex()), zap.String("namespace", namespaceKey))
-
- if err := store.Push(ctx, desc, bytes.NewReader(payload)); err != nil && !errors.Is(err, errdef.ErrAlreadyExists) {
- return err
- }
-
- layers = append(layers, desc)
- return nil
- }); err != nil {
- return nil, err
- }
- return layers, nil
-}
-
-func (s *Store) Copy(ctx context.Context, src, dst Reference) (Bundle, error) {
- if src.Reference.Reference == "" {
- return Bundle{}, fmt.Errorf("source bundle: %w", ErrReferenceRequired)
- }
-
- if dst.Reference.Reference == "" {
- return Bundle{}, fmt.Errorf("destination bundle: %w", ErrReferenceRequired)
- }
-
- srcTarget, err := s.getTarget(src)
- if err != nil {
- return Bundle{}, err
- }
-
- dstTarget, err := s.getTarget(dst)
- if err != nil {
- return Bundle{}, err
- }
-
- desc, err := oras.Copy(
- ctx,
- srcTarget,
- src.Reference.Reference,
- dstTarget,
- dst.Reference.Reference,
- oras.DefaultCopyOptions)
- if err != nil {
- return Bundle{}, err
- }
-
- rd, err := dstTarget.Fetch(ctx, desc)
- if err != nil {
- return Bundle{}, err
- }
-
- var man v1.Manifest
- if err := json.NewDecoder(rd).Decode(&man); err != nil {
- return Bundle{}, err
- }
-
- bundle := Bundle{
- Digest: desc.Digest,
- Repository: dst.Repository,
- Tag: dst.Reference.Reference,
- }
-
- bundle.CreatedAt, err = parseCreated(man.Annotations)
- if err != nil {
- return Bundle{}, err
- }
-
- return bundle, nil
-}
-
-func getMediaTypeAndEncoding(layer v1.Descriptor) (mediaType, encoding string, _ error) {
- var ok bool
- if mediaType = layer.MediaType; mediaType == "" {
- return "", "", ErrMissingMediaType
- }
-
- if mediaType, encoding, ok = strings.Cut(mediaType, "+"); !ok {
- encoding = "json"
- }
-
- return
-}
-
-// File is a wrapper around a flipt feature state files contents.
-type File struct {
- io.ReadCloser
- info FileInfo
-}
-
-// Seek attempts to seek the embedded read-closer.
-// If the embedded read closer implements seek, then it delegates
-// to that instances implementation. Alternatively, it returns
-// an error signifying that the File cannot be seeked.
-func (f *File) Seek(offset int64, whence int) (int64, error) {
- if seek, ok := f.ReadCloser.(io.Seeker); ok {
- return seek.Seek(offset, whence)
- }
-
- return 0, errors.New("seeker cannot seek")
-}
-
-func (f *File) Stat() (fs.FileInfo, error) {
- return &f.info, nil
-}
-
-// FileInfo describes a flipt features state file instance.
-type FileInfo struct {
- desc v1.Descriptor
- encoding string
- mod time.Time
-}
-
-func (f FileInfo) Name() string {
- return f.desc.Digest.Hex() + "." + f.encoding
-}
-
-func (f FileInfo) Size() int64 {
- return f.desc.Size
-}
-
-func (f FileInfo) Mode() fs.FileMode {
- return fs.ModePerm
-}
-
-func (f FileInfo) ModTime() time.Time {
- return f.mod
-}
-
-func (f FileInfo) IsDir() bool {
- return false
-}
-
-func (f FileInfo) Sys() any {
- return nil
-}
-
-func parseCreated(annotations map[string]string) (time.Time, error) {
- return time.Parse(time.RFC3339, annotations[v1.AnnotationCreated])
-}
diff --git a/internal/oci/file_test.go b/internal/oci/file_test.go
deleted file mode 100644
index fb49e2ff10..0000000000
--- a/internal/oci/file_test.go
+++ /dev/null
@@ -1,524 +0,0 @@
-package oci
-
-import (
- "bytes"
- "context"
- "embed"
- "encoding/base64"
- "errors"
- "fmt"
- "io"
- "io/fs"
- "log"
- "net/http"
- "net/http/httptest"
- "path"
- "strings"
- "testing"
- "time"
-
- containerregistry "github.com/google/go-containerregistry/pkg/registry"
- "github.com/opencontainers/go-digest"
- v1 "github.com/opencontainers/image-spec/specs-go/v1"
- "github.com/stretchr/testify/assert"
- "github.com/stretchr/testify/require"
- "go.uber.org/zap/zaptest"
- "oras.land/oras-go/v2"
- "oras.land/oras-go/v2/content/oci"
- "oras.land/oras-go/v2/registry"
-)
-
-const repo = "testrepo"
-
-func TestParseReference(t *testing.T) {
- for _, test := range []struct {
- name string
- reference string
- expected Reference
- expectedErr error
- }{
- {
- name: "unexpected scheme",
- reference: "fake://local/something:latest",
- expectedErr: errors.New(`unexpected repository scheme: "fake" should be one of [http|https|flipt]`),
- },
- {
- name: "invalid local reference",
- reference: "flipt://invalid/something:latest",
- expectedErr: errors.New(`unexpected local reference: "invalid/something:latest"`),
- },
- {
- name: "valid local",
- reference: "flipt://local/something:latest",
- expected: Reference{
- Reference: registry.Reference{
- Registry: "local",
- Repository: "something",
- Reference: "latest",
- },
- Scheme: "flipt",
- },
- },
- {
- name: "valid bare local",
- reference: "something:latest",
- expected: Reference{
- Reference: registry.Reference{
- Registry: "local",
- Repository: "something",
- Reference: "latest",
- },
- Scheme: "flipt",
- },
- },
- {
- name: "valid insecure remote",
- reference: "http://remote/something:latest",
- expected: Reference{
- Reference: registry.Reference{
- Registry: "remote",
- Repository: "something",
- Reference: "latest",
- },
- Scheme: "http",
- },
- },
- {
- name: "valid remote",
- reference: "https://remote/something:latest",
- expected: Reference{
- Reference: registry.Reference{
- Registry: "remote",
- Repository: "something",
- Reference: "latest",
- },
- Scheme: "https",
- },
- },
- {
- name: "valid bare remote",
- reference: "remote/something:latest",
- expected: Reference{
- Reference: registry.Reference{
- Registry: "remote",
- Repository: "something",
- Reference: "latest",
- },
- Scheme: "https",
- },
- },
- } {
- t.Run(test.name, func(t *testing.T) {
- ref, err := ParseReference(test.reference)
- if test.expectedErr != nil {
- require.Equal(t, test.expectedErr, err)
- return
- }
-
- require.NoError(t, err)
- assert.Equal(t, test.expected, ref)
- })
- }
-}
-
-func TestStore_Fetch_InvalidMediaType(t *testing.T) {
- dir := testRepository(t,
- layer("default", `{"namespace":"default"}`, "unexpected.media.type"),
- )
-
- ref, err := ParseReference(fmt.Sprintf("flipt://local/%s:latest", repo))
- require.NoError(t, err)
-
- store, err := NewStore(zaptest.NewLogger(t), dir)
- require.NoError(t, err)
-
- ctx := context.Background()
- _, err = store.Fetch(ctx, ref)
- require.EqualError(t, err, "layer \"sha256:85ee577ad99c62f314abca9f43ad87c2ee8818513e6383a77690df56d0352748\": type \"unexpected.media.type\": unexpected media type")
-
- dir = testRepository(t,
- layer("default", `{"namespace":"default"}`, MediaTypeFliptNamespace+"+unknown"),
- )
-
- store, err = NewStore(zaptest.NewLogger(t), dir)
- require.NoError(t, err)
-
- _, err = store.Fetch(ctx, ref)
- require.EqualError(t, err, "layer \"sha256:85ee577ad99c62f314abca9f43ad87c2ee8818513e6383a77690df56d0352748\": unexpected layer encoding: \"unknown\"")
-}
-
-func TestStore_Fetch(t *testing.T) {
- dir := testRepository(t,
- layer("default", `{"namespace":"default"}`, MediaTypeFliptNamespace),
- layer("other", `namespace: other`, MediaTypeFliptNamespace+"+yaml"),
- )
-
- ref, err := ParseReference(fmt.Sprintf("flipt://local/%s:latest", repo))
- require.NoError(t, err)
-
- store, err := NewStore(zaptest.NewLogger(t), dir)
- require.NoError(t, err)
-
- ctx := context.Background()
- resp, err := store.Fetch(ctx, ref)
- require.NoError(t, err)
-
- require.False(t, resp.Matched, "matched an empty digest unexpectedly")
- // should remain consistent with contents
- const manifestDigest = digest.Digest("sha256:7cd89519a7f44605a0964cb96e72fef972ebdc0fa4153adac2e8cd2ed5b0e90a")
- assert.Equal(t, manifestDigest, resp.Digest)
-
- var (
- expected = map[string]string{
- "85ee577ad99c62f314abca9f43ad87c2ee8818513e6383a77690df56d0352748.json": `{"namespace":"default"}`,
- "bbc859ba2a5e9ecc9469a06ae8770b7c0a6e2af2bf16f6bb9184d0244ffd79da.yaml": `namespace: other`,
- }
- found = map[string]string{}
- )
-
- for _, fi := range resp.Files {
- defer fi.Close()
-
- stat, err := fi.Stat()
- require.NoError(t, err)
-
- bytes, err := io.ReadAll(fi)
- require.NoError(t, err)
-
- found[stat.Name()] = string(bytes)
- }
-
- assert.Equal(t, expected, found)
-
- t.Run("IfNoMatch", func(t *testing.T) {
- resp, err = store.Fetch(ctx, ref, IfNoMatch(manifestDigest))
- require.NoError(t, err)
-
- require.True(t, resp.Matched)
- assert.Equal(t, manifestDigest, resp.Digest)
- assert.Empty(t, resp.Files)
- })
-}
-
-//go:embed testdata/*
-var testdata embed.FS
-
-func TestStore_Build(t *testing.T) {
- ctx := context.TODO()
- dir := testRepository(t)
-
- ref, err := ParseReference(fmt.Sprintf("flipt://local/%s:latest", repo))
- require.NoError(t, err)
-
- store, err := NewStore(zaptest.NewLogger(t), dir)
- require.NoError(t, err)
-
- testdata, err := fs.Sub(testdata, "testdata")
- require.NoError(t, err)
-
- bundle, err := store.Build(ctx, testdata, ref)
- require.NoError(t, err)
-
- assert.Equal(t, repo, bundle.Repository)
- assert.Equal(t, "latest", bundle.Tag)
- assert.NotEmpty(t, bundle.Digest)
- assert.NotEmpty(t, bundle.CreatedAt)
-
- resp, err := store.Fetch(ctx, ref)
- require.NoError(t, err)
- require.False(t, resp.Matched)
-
- assert.Len(t, resp.Files, 2)
-}
-
-func TestStore_List(t *testing.T) {
- ctx := context.TODO()
- dir := testRepository(t)
-
- ref, err := ParseReference(fmt.Sprintf("%s:latest", repo))
- require.NoError(t, err)
-
- store, err := NewStore(zaptest.NewLogger(t), dir)
- require.NoError(t, err)
-
- bundles, err := store.List(ctx)
- require.NoError(t, err)
- require.Empty(t, bundles)
-
- testdata, err := fs.Sub(testdata, "testdata")
- require.NoError(t, err)
-
- bundle, err := store.Build(ctx, testdata, ref)
- require.NoError(t, err)
-
- t.Log("bundle created digest:", bundle.Digest)
-
- // sleep long enough for 1 second to pass
- // to bump the timestamp on next build
- time.Sleep(1 * time.Second)
-
- bundle, err = store.Build(ctx, testdata, ref)
- require.NoError(t, err)
-
- t.Log("bundle created digest:", bundle.Digest)
-
- bundles, err = store.List(ctx)
- require.NoError(t, err)
- require.Len(t, bundles, 2)
-
- assert.Equal(t, "latest", bundles[0].Tag)
- assert.Empty(t, bundles[1].Tag)
-}
-
-func TestStore_Copy(t *testing.T) {
- ctx := context.TODO()
- dir := testRepository(t)
-
- src, err := ParseReference("flipt://local/source:latest")
- require.NoError(t, err)
-
- store, err := NewStore(zaptest.NewLogger(t), dir)
- require.NoError(t, err)
-
- testdata, err := fs.Sub(testdata, "testdata")
- require.NoError(t, err)
-
- _, err = store.Build(ctx, testdata, src)
- require.NoError(t, err)
-
- for _, test := range []struct {
- name string
- src string
- dst string
- expectedRepo string
- expectedTag string
- expectedErr error
- }{
- {
- name: "valid",
- src: "flipt://local/source:latest",
- dst: "flipt://local/target:latest",
- expectedRepo: "target",
- expectedTag: "latest",
- },
- {
- name: "invalid source (no reference)",
- src: "flipt://local/source",
- dst: "flipt://local/target:latest",
- expectedErr: fmt.Errorf("source bundle: %w", ErrReferenceRequired),
- },
- {
- name: "invalid destination (no reference)",
- src: "flipt://local/source:latest",
- dst: "flipt://local/target",
- expectedErr: fmt.Errorf("destination bundle: %w", ErrReferenceRequired),
- },
- } {
- t.Run(test.name, func(t *testing.T) {
- src, err := ParseReference(test.src)
- require.NoError(t, err)
-
- dst, err := ParseReference(test.dst)
- require.NoError(t, err)
-
- bundle, err := store.Copy(ctx, src, dst)
- if test.expectedErr != nil {
- require.Equal(t, test.expectedErr, err)
- return
- }
-
- require.NoError(t, err)
-
- assert.Equal(t, test.expectedRepo, bundle.Repository)
- assert.Equal(t, test.expectedTag, bundle.Tag)
- assert.NotEmpty(t, bundle.Digest)
- assert.NotEmpty(t, bundle.CreatedAt)
-
- resp, err := store.Fetch(ctx, dst)
- require.NoError(t, err)
- require.False(t, resp.Matched)
-
- assert.Len(t, resp.Files, 2)
- })
- }
-}
-
-func TestFile(t *testing.T) {
- var (
- rd = strings.NewReader("contents")
- mod = time.Date(2023, 11, 9, 12, 0, 0, 0, time.UTC)
- info = FileInfo{
- desc: v1.Descriptor{
- Digest: digest.FromString("contents"),
- Size: rd.Size(),
- },
- encoding: "json",
- mod: mod,
- }
- fi = File{
- ReadCloser: readCloseSeeker{rd},
- info: info,
- }
- )
-
- stat, err := fi.Stat()
- require.NoError(t, err)
-
- assert.Equal(t, "d1b2a59fbea7e20077af9f91b27e95e865061b270be03ff539ab3b73587882e8.json", stat.Name())
- assert.Equal(t, fs.ModePerm, stat.Mode())
- assert.Equal(t, mod, stat.ModTime())
- assert.False(t, stat.IsDir())
- assert.Nil(t, stat.Sys())
-
- count, err := fi.Seek(3, io.SeekStart)
- require.NoError(t, err)
- assert.Equal(t, int64(3), count)
-
- data, err := io.ReadAll(fi)
- require.NoError(t, err)
- assert.Equal(t, "tents", string(data))
-
- // rewind reader
- _, err = rd.Seek(0, io.SeekStart)
- require.NoError(t, err)
-
- // ensure seeker cannot seek
- fi = File{
- ReadCloser: io.NopCloser(rd),
- info: info,
- }
-
- _, err = fi.Seek(3, io.SeekStart)
- require.EqualError(t, err, "seeker cannot seek")
-}
-
-func TestStore_FetchWithECR(t *testing.T) {
- registryURI, endpoint := testEcrStub(t, time.Second)
-
- dir := testRepository(t)
-
- src, err := ParseReference("flipt://local/source:latest")
- require.NoError(t, err)
-
- store, err := NewStore(zaptest.NewLogger(t), dir, WithAWSECRCredentials(endpoint))
- require.NoError(t, err)
-
- testdata, err := fs.Sub(testdata, "testdata")
- require.NoError(t, err)
-
- _, err = store.Build(context.Background(), testdata, src)
- require.NoError(t, err)
-
- dst, err := ParseReference(fmt.Sprintf("%s/%s:latest", registryURI, repo))
- require.NoError(t, err)
- _, err = store.Copy(context.Background(), src, dst)
- require.NoError(t, err)
-
- _, err = store.Fetch(context.Background(), dst)
- require.NoError(t, err)
- time.Sleep(time.Second)
- _, err = store.Fetch(context.Background(), dst)
- require.NoError(t, err)
-}
-
-type readCloseSeeker struct {
- io.ReadSeeker
-}
-
-func (r readCloseSeeker) Close() error { return nil }
-
-func layer(ns, payload, mediaType string) func(*testing.T, oras.Target) v1.Descriptor {
- return func(t *testing.T, store oras.Target) v1.Descriptor {
- t.Helper()
-
- desc := v1.Descriptor{
- Digest: digest.FromString(payload),
- Size: int64(len(payload)),
- MediaType: mediaType,
- Annotations: map[string]string{
- AnnotationFliptNamespace: ns,
- },
- }
-
- require.NoError(t, store.Push(context.TODO(), desc, bytes.NewReader([]byte(payload))))
-
- return desc
- }
-}
-
-func testRepository(t *testing.T, layerFuncs ...func(*testing.T, oras.Target) v1.Descriptor) (dir string) {
- t.Helper()
-
- dir = t.TempDir()
-
- t.Log("test OCI directory", dir, repo)
-
- store, err := oci.New(path.Join(dir, repo))
- require.NoError(t, err)
-
- store.AutoSaveIndex = true
-
- ctx := context.TODO()
-
- if len(layerFuncs) == 0 {
- return
- }
-
- var layers []v1.Descriptor
- for _, fn := range layerFuncs {
- layers = append(layers, fn(t, store))
- }
-
- desc, err := oras.PackManifest(ctx, store, oras.PackManifestVersion1_1, MediaTypeFliptFeatures, oras.PackManifestOptions{
- ManifestAnnotations: map[string]string{},
- Layers: layers,
- })
- require.NoError(t, err)
-
- require.NoError(t, store.Tag(ctx, desc, "latest"))
-
- return
-}
-
-// testEcrStub is a stub for AWS ECR private service. It allows to get the auth token and
-// push/pull from registry.
-func testEcrStub(t testing.TB, tokenTtl time.Duration) (registry string, endpoint string) {
- t.Helper()
- t.Setenv("AWS_ACCESS_KEY_ID", "key")
- t.Setenv("AWS_SECRET_ACCESS_KEY", "secret")
- token := base64.RawStdEncoding.EncodeToString([]byte("user_name:password"))
-
- authMiddleware := func(next http.Handler) http.Handler {
- return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
- if r.Header.Get("Authorization") != "Basic "+token {
- w.Header().Add("Www-Authenticate", "Basic realm=private-ecr")
- w.WriteHeader(http.StatusUnauthorized)
- return
- }
- next.ServeHTTP(w, r)
- })
- }
-
- mux := http.NewServeMux()
- // registry endpoint
- mux.Handle("/v2/", authMiddleware(containerregistry.New(containerregistry.Logger(log.New(io.Discard, "", 0)))))
- // aws ecr endpoint
- mux.HandleFunc("POST /aws-api", func(w http.ResponseWriter, r *http.Request) {
- amzDate := r.Header.Get("X-Amz-Date")
- requestTime, _ := time.Parse("20060102T150405Z", amzDate)
- requestTime = requestTime.Add(tokenTtl)
- amzTarget := r.Header.Get("X-Amz-Target")
- switch amzTarget {
- case "AmazonEC2ContainerRegistry_V20150921.GetAuthorizationToken":
- w.WriteHeader(http.StatusOK)
- _, _ = w.Write([]byte(fmt.Sprintf(`{"authorizationData": [{"authorizationToken": "%s", "expiresAt": %d}]}`, token, requestTime.Unix())))
- default:
- w.WriteHeader(http.StatusBadRequest)
- }
- })
- httpServer := httptest.NewServer(mux)
- t.Cleanup(httpServer.Close)
- registry = httpServer.URL
- endpoint = httpServer.URL + "/aws-api"
- return
-}
diff --git a/internal/oci/mock_credentialFunc.go b/internal/oci/mock_credentialFunc.go
deleted file mode 100644
index 33be42f7a3..0000000000
--- a/internal/oci/mock_credentialFunc.go
+++ /dev/null
@@ -1,47 +0,0 @@
-// Code generated by mockery v2.42.1. DO NOT EDIT.
-
-package oci
-
-import (
- mock "github.com/stretchr/testify/mock"
- auth "oras.land/oras-go/v2/registry/remote/auth"
-)
-
-// mockCredentialFunc is an autogenerated mock type for the credentialFunc type
-type mockCredentialFunc struct {
- mock.Mock
-}
-
-// Execute provides a mock function with given fields: registry
-func (_m *mockCredentialFunc) Execute(registry string) auth.CredentialFunc {
- ret := _m.Called(registry)
-
- if len(ret) == 0 {
- panic("no return value specified for Execute")
- }
-
- var r0 auth.CredentialFunc
- if rf, ok := ret.Get(0).(func(string) auth.CredentialFunc); ok {
- r0 = rf(registry)
- } else {
- if ret.Get(0) != nil {
- r0 = ret.Get(0).(auth.CredentialFunc)
- }
- }
-
- return r0
-}
-
-// newMockCredentialFunc creates a new instance of mockCredentialFunc. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
-// The first argument is typically a *testing.T value.
-func newMockCredentialFunc(t interface {
- mock.TestingT
- Cleanup(func())
-}) *mockCredentialFunc {
- mock := &mockCredentialFunc{}
- mock.Mock.Test(t)
-
- t.Cleanup(func() { mock.AssertExpectations(t) })
-
- return mock
-}
diff --git a/internal/oci/oci.go b/internal/oci/oci.go
deleted file mode 100644
index 2be6ffab4a..0000000000
--- a/internal/oci/oci.go
+++ /dev/null
@@ -1,26 +0,0 @@
-package oci
-
-import "errors"
-
-const (
- // MediaTypeFliptFeatures is the OCI media type for a flipt features artifact
- MediaTypeFliptFeatures = "application/vnd.io.flipt.features.v1"
- // MediaTypeFliptNamespace is the OCI media type for a flipt features namespace artifact
- MediaTypeFliptNamespace = "application/vnd.io.flipt.features.namespace.v1"
-
- // AnnotationFliptNamespace is an OCI annotation key which identifies the namespace key
- // of the annotated flipt namespace artifact
- AnnotationFliptNamespace = "io.flipt.features.namespace"
-)
-
-var (
- // ErrMissingMediaType is returned when a descriptor is presented
- // without a media type
- ErrMissingMediaType = errors.New("missing media type")
- // ErrUnexpectedMediaType is returned when an unexpected media type
- // is found on a target manifest or descriptor
- ErrUnexpectedMediaType = errors.New("unexpected media type")
- // ErrReferenceRequired is returned when a referenced is required for
- // a particular operation
- ErrReferenceRequired = errors.New("reference required")
-)
diff --git a/internal/oci/options.go b/internal/oci/options.go
deleted file mode 100644
index 0229e1cd16..0000000000
--- a/internal/oci/options.go
+++ /dev/null
@@ -1,81 +0,0 @@
-package oci
-
-import (
- "fmt"
-
- "go.flipt.io/flipt/internal/containers"
- "go.flipt.io/flipt/internal/oci/ecr"
- "oras.land/oras-go/v2"
- "oras.land/oras-go/v2/registry/remote/auth"
-)
-
-type AuthenticationType string
-
-const (
- AuthenticationTypeStatic AuthenticationType = "static"
- AuthenticationTypeAWSECR AuthenticationType = "aws-ecr"
-)
-
-func (s AuthenticationType) IsValid() bool {
- switch s {
- case AuthenticationTypeStatic, AuthenticationTypeAWSECR:
- return true
- }
-
- return false
-}
-
-// StoreOptions are used to configure call to NewStore
-// This shouldn't be handled directory, instead use one of the function options
-// e.g. WithBundleDir or WithCredentials
-type StoreOptions struct {
- bundleDir string
- manifestVersion oras.PackManifestVersion
- auth credentialFunc
- authCache auth.Cache
-}
-
-// WithCredentials configures username and password credentials used for authenticating
-// with remote registries
-func WithCredentials(kind AuthenticationType, user, pass string) (containers.Option[StoreOptions], error) {
- switch kind {
- case AuthenticationTypeAWSECR:
- return WithAWSECRCredentials(""), nil
- case AuthenticationTypeStatic:
- return WithStaticCredentials(user, pass), nil
- default:
- return nil, fmt.Errorf("unsupported auth type %s", kind)
- }
-}
-
-// WithStaticCredentials configures username and password credentials used for authenticating
-// with remote registries
-func WithStaticCredentials(user, pass string) containers.Option[StoreOptions] {
- return func(so *StoreOptions) {
- so.auth = func(registry string) auth.CredentialFunc {
- return auth.StaticCredential(registry, auth.Credential{
- Username: user,
- Password: pass,
- })
- }
- so.authCache = auth.DefaultCache
- }
-}
-
-// WithAWSECRCredentials configures username and password credentials used for authenticating
-// with remote registries
-func WithAWSECRCredentials(endpoint string) containers.Option[StoreOptions] {
- return func(so *StoreOptions) {
- store := ecr.NewCredentialsStore(endpoint)
- so.auth = func(registry string) auth.CredentialFunc {
- return ecr.Credential(store)
- }
- }
-}
-
-// WithManifestVersion configures what OCI Manifest version to build the bundle.
-func WithManifestVersion(version oras.PackManifestVersion) containers.Option[StoreOptions] {
- return func(s *StoreOptions) {
- s.manifestVersion = version
- }
-}
diff --git a/internal/oci/options_test.go b/internal/oci/options_test.go
deleted file mode 100644
index 1641424a58..0000000000
--- a/internal/oci/options_test.go
+++ /dev/null
@@ -1,47 +0,0 @@
-package oci
-
-import (
- "testing"
-
- "github.com/stretchr/testify/assert"
- "github.com/stretchr/testify/require"
- "oras.land/oras-go/v2"
-)
-
-func TestWithCredentials(t *testing.T) {
- for _, tt := range []struct {
- kind AuthenticationType
- user string
- pass string
- expectedError string
- }{
- {kind: AuthenticationTypeStatic, user: "u", pass: "p"},
- {kind: AuthenticationTypeAWSECR},
- {kind: AuthenticationType("unknown"), expectedError: "unsupported auth type unknown"},
- } {
- t.Run(string(tt.kind), func(t *testing.T) {
- o := &StoreOptions{}
- opt, err := WithCredentials(tt.kind, tt.user, tt.pass)
- if tt.expectedError != "" {
- assert.EqualError(t, err, tt.expectedError)
- } else {
- require.NoError(t, err)
- opt(o)
- assert.NotNil(t, o.auth)
- assert.NotNil(t, o.auth("test"))
- }
- })
- }
-}
-
-func TestWithManifestVersion(t *testing.T) {
- o := &StoreOptions{}
- WithManifestVersion(oras.PackManifestVersion1_1)(o)
- assert.Equal(t, oras.PackManifestVersion1_1, o.manifestVersion)
-}
-
-func TestAuthenicationTypeIsValid(t *testing.T) {
- assert.True(t, AuthenticationTypeStatic.IsValid())
- assert.True(t, AuthenticationTypeAWSECR.IsValid())
- assert.False(t, AuthenticationType("").IsValid())
-}
diff --git a/internal/oci/testdata/.flipt.yml b/internal/oci/testdata/.flipt.yml
deleted file mode 100644
index c2103e459b..0000000000
--- a/internal/oci/testdata/.flipt.yml
+++ /dev/null
@@ -1,4 +0,0 @@
-version: 1.0
-include:
- - default.yml
- - production.yml
diff --git a/internal/oci/testdata/default.yml b/internal/oci/testdata/default.yml
deleted file mode 100644
index bf4ce6fa6a..0000000000
--- a/internal/oci/testdata/default.yml
+++ /dev/null
@@ -1,25 +0,0 @@
-flags:
-- key: flag_boolean
- name: FLAG_BOOLEAN
- type: BOOLEAN_FLAG_TYPE
- description: Boolean Flag Description
- enabled: false
- rollouts:
- - description: enabled for segment_001
- segment:
- key: segment_001
- value: true
- - description: disabled for segment_002
- segment:
- key: segment_002
- - description: enabled for segment_003
- segment:
- key: segment_003
- value: true
- - description: disabled for segment_004
- segment:
- key: segment_004
- - description: enabled for 50%
- threshold:
- percentage: 50
- value: true
diff --git a/internal/oci/testdata/production.yml b/internal/oci/testdata/production.yml
deleted file mode 100644
index ae481e0548..0000000000
--- a/internal/oci/testdata/production.yml
+++ /dev/null
@@ -1,26 +0,0 @@
-namespace: production
-flags:
-- key: flag_boolean
- name: FLAG_BOOLEAN
- type: BOOLEAN_FLAG_TYPE
- description: Boolean Flag Description
- enabled: false
- rollouts:
- - description: enabled for segment_001
- segment:
- key: segment_001
- value: true
- - description: disabled for segment_002
- segment:
- key: segment_002
- - description: enabled for segment_003
- segment:
- key: segment_003
- value: true
- - description: disabled for segment_004
- segment:
- key: segment_004
- - description: enabled for 50%
- threshold:
- percentage: 50
- value: true
diff --git a/internal/server/analytics/mock_Client_test.go b/internal/server/analytics/mock_client_test.go
similarity index 100%
rename from internal/server/analytics/mock_Client_test.go
rename to internal/server/analytics/mock_client_test.go
diff --git a/internal/server/analytics/testing/testing.go b/internal/server/analytics/testing/testing.go
index 52892b05f6..343b6c8c8e 100644
--- a/internal/server/analytics/testing/testing.go
+++ b/internal/server/analytics/testing/testing.go
@@ -13,7 +13,7 @@ import (
"github.com/golang-migrate/migrate/v4/source/iofs"
"github.com/testcontainers/testcontainers-go"
"github.com/testcontainers/testcontainers-go/wait"
- "go.flipt.io/flipt/config/migrations"
+ "go.flipt.io/flipt/internal/migrations"
fliptsql "go.flipt.io/flipt/internal/storage/sql"
)
diff --git a/internal/server/audit/checker.go b/internal/server/audit/checker.go
deleted file mode 100644
index 7e16a5c9b1..0000000000
--- a/internal/server/audit/checker.go
+++ /dev/null
@@ -1,111 +0,0 @@
-package audit
-
-import (
- "errors"
- "fmt"
- "strings"
-)
-
-// EventPairChecker is the contract for checking if an event pair exists and if it should be emitted to configured sinks.
-type EventPairChecker interface {
- Check(eventPair string) bool
- Events() []string
-}
-
-// Checker holds a map that maps event pairs to a dummy struct. It is basically
-// used as a set to check for existence.
-type Checker struct {
- eventActions map[string]struct{}
-}
-
-// NewChecker is the constructor for a Checker.
-func NewChecker(eventPairs []string) (*Checker, error) {
- nouns := map[string][]string{
- "constraint": {"constraint"},
- "distribution": {"distribution"},
- "flag": {"flag"},
- "namespace": {"namespace"},
- "rollout": {"rollout"},
- "rule": {"rule"},
- "segment": {"segment"},
- "token": {"token"},
- "variant": {"variant"},
- "*": {"constraint", "distribution", "flag", "namespace", "rollout", "rule", "segment", "token", "variant"},
- }
-
- verbs := map[string][]string{
- "created": {"created"},
- "deleted": {"deleted"},
- "updated": {"updated"},
- "*": {"created", "deleted", "updated"},
- }
-
- eventActions := make(map[string]struct{})
- for _, ep := range eventPairs {
- epSplit := strings.Split(ep, ":")
- if len(epSplit) < 2 {
- return nil, fmt.Errorf("invalid event pair: %s", ep)
- }
-
- eventNouns, ok := nouns[epSplit[0]]
- if !ok {
- return nil, fmt.Errorf("invalid noun: %s", epSplit[0])
- }
-
- eventVerbs, ok := verbs[epSplit[1]]
- if !ok {
- return nil, fmt.Errorf("invalid verb: %s", epSplit[1])
- }
-
- for _, en := range eventNouns {
- for _, ev := range eventVerbs {
- eventPair := fmt.Sprintf("%s:%s", en, ev)
-
- _, ok := eventActions[eventPair]
- if ok {
- return nil, fmt.Errorf("repeated event pair: %s", eventPair)
- }
-
- eventActions[eventPair] = struct{}{}
- }
- }
- }
-
- if len(eventActions) == 0 {
- return nil, errors.New("no event pairs exist")
- }
-
- return &Checker{
- eventActions: eventActions,
- }, nil
-}
-
-// Check checks if an event pair exists in the Checker data structure for event emission.
-func (c *Checker) Check(eventPair string) bool {
- if c == nil || c.eventActions == nil {
- return false
- }
-
- _, ok := c.eventActions[eventPair]
- return ok
-}
-
-// Events returns the type of events we would like to emit to configured sinks.
-func (c *Checker) Events() []string {
- var events = make([]string, 0, len(c.eventActions))
- for k := range c.eventActions {
- events = append(events, k)
- }
-
- return events
-}
-
-type NoOpChecker struct{}
-
-func (n *NoOpChecker) Check(eventPair string) bool {
- return false
-}
-
-func (n *NoOpChecker) Events() []string {
- return []string{}
-}
diff --git a/internal/server/audit/checker_test.go b/internal/server/audit/checker_test.go
deleted file mode 100644
index dc1a3cb409..0000000000
--- a/internal/server/audit/checker_test.go
+++ /dev/null
@@ -1,137 +0,0 @@
-package audit
-
-import (
- "fmt"
- "testing"
-
- "github.com/stretchr/testify/assert"
-)
-
-func TestChecker(t *testing.T) {
- testCases := []struct {
- name string
- eventPairs []string
- expectedError error
- pairs map[string]bool
- }{
- {
- name: "wild card for nouns",
- eventPairs: []string{"*:created"},
- expectedError: nil,
- pairs: map[string]bool{
- "constraint:created": true,
- "distribution:created": true,
- "flag:created": true,
- "namespace:created": true,
- "rollout:created": true,
- "rule:created": true,
- "segment:created": true,
- "token:created": true,
- "variant:created": true,
- "constraint:deleted": false,
- "distribution:deleted": false,
- "flag:deleted": false,
- "namespace:deleted": false,
- "rollout:deleted": false,
- "rule:deleted": false,
- "segment:deleted": false,
- "token:deleted": false,
- "variant:deleted": false,
- "constraint:updated": false,
- "distribution:updated": false,
- "flag:updated": false,
- "namespace:updated": false,
- "rollout:updated": false,
- "rule:updated": false,
- "segment:updated": false,
- "variant:updated": false,
- },
- },
- {
- name: "wild card for verbs",
- eventPairs: []string{"flag:*"},
- expectedError: nil,
- pairs: map[string]bool{
- "constraint:created": false,
- "distribution:created": false,
- "flag:created": true,
- "namespace:created": false,
- "rollout:created": false,
- "rule:created": false,
- "segment:created": false,
- "token:created": false,
- "variant:created": false,
- "constraint:deleted": false,
- "distribution:deleted": false,
- "flag:deleted": true,
- "namespace:deleted": false,
- "rollout:deleted": false,
- "rule:deleted": false,
- "segment:deleted": false,
- "token:deleted": false,
- "variant:deleted": false,
- "constraint:updated": false,
- "distribution:updated": false,
- "flag:updated": true,
- "namespace:updated": false,
- "rollout:updated": false,
- "rule:updated": false,
- "segment:updated": false,
- "variant:updated": false,
- },
- },
- {
- name: "single pair",
- eventPairs: []string{"flag:created"},
- expectedError: nil,
- pairs: map[string]bool{
- "constraint:created": false,
- "distribution:created": false,
- "flag:created": true,
- "namespace:created": false,
- "rollout:created": false,
- "rule:created": false,
- "segment:created": false,
- "token:created": false,
- "variant:created": false,
- "constraint:deleted": false,
- "distribution:deleted": false,
- "flag:deleted": false,
- "namespace:deleted": false,
- "rollout:deleted": false,
- "rule:deleted": false,
- "segment:deleted": false,
- "token:deleted": false,
- "variant:deleted": false,
- "constraint:updated": false,
- "distribution:updated": false,
- "flag:updated": false,
- "namespace:updated": false,
- "rollout:updated": false,
- "rule:updated": false,
- "segment:updated": false,
- "variant:updated": false,
- },
- },
- {
- name: "error repeating event pairs",
- eventPairs: []string{"*:created", "flag:created"},
- expectedError: fmt.Errorf("repeated event pair: %s", "flag:created"),
- },
- }
-
- for _, tc := range testCases {
- t.Run(tc.name, func(t *testing.T) {
- checker, err := NewChecker(tc.eventPairs)
- if tc.expectedError != nil {
- assert.EqualError(t, err, tc.expectedError.Error())
- return
- }
-
- for k, v := range tc.pairs {
- actual := checker.Check(k)
- assert.Equal(t, v, actual)
- }
- })
- }
-}
diff --git a/internal/server/audit/cloud/cloud.go b/internal/server/audit/cloud/cloud.go
deleted file mode 100644
index aff71654f5..0000000000
--- a/internal/server/audit/cloud/cloud.go
+++ /dev/null
@@ -1,70 +0,0 @@
-package cloud
-
-import (
- "context"
- "fmt"
- "time"
-
- "github.com/hashicorp/go-multierror"
- "go.flipt.io/flipt/internal/server/audit"
- "go.flipt.io/flipt/internal/server/audit/template"
- "go.uber.org/zap"
-)
-
-const sinkType = "cloud"
-
-type Sink struct {
- logger *zap.Logger
-
- executer template.Executer
-}
-
-// NewSink is the constructor for a Sink.
-func NewSink(logger *zap.Logger, apiKey string, url string) (audit.Sink, error) {
- const body = `{
- "type": "{{ .Type }}",
- "action": "{{ .Action }}",
- "actor": {{ toJson .Metadata.Actor }},
- "payload": {{ toJson .Payload }}
- }`
-
- headers := map[string]string{
- "Authorization": fmt.Sprintf("Bearer %s", apiKey),
- "Content-Type": "application/json",
- }
-
- executer, err := template.NewWebhookTemplate(logger, url, body, headers, 15*time.Second)
- if err != nil {
- return nil, fmt.Errorf("failed to create webhook template sink: %w", err)
- }
-
- return &Sink{
- logger: logger.With(zap.String("sink", sinkType)),
- executer: executer,
- }, nil
-}
-
-// Close implements audit.Sink.
-func (s *Sink) Close() error {
- return nil
-}
-
-// SendAudits implements audit.Sink.
-func (s *Sink) SendAudits(ctx context.Context, events []audit.Event) error {
- var result error
-
- for _, e := range events {
- err := s.executer.Execute(ctx, e)
- if err != nil {
- s.logger.Error("failed to send audit to webhook", zap.Error(err))
- result = multierror.Append(result, err)
- }
- }
-
- return result
-}
-
-// String implements audit.Sink.
-func (s *Sink) String() string {
- return sinkType
-}
diff --git a/internal/server/audit/cloud/cloud_test.go b/internal/server/audit/cloud/cloud_test.go
deleted file mode 100644
index 72b99d042d..0000000000
--- a/internal/server/audit/cloud/cloud_test.go
+++ /dev/null
@@ -1,42 +0,0 @@
-package cloud
-
-import (
- "context"
- "testing"
-
- "github.com/stretchr/testify/require"
- "go.flipt.io/flipt/internal/server/audit"
- "go.flipt.io/flipt/rpc/flipt"
- "go.uber.org/zap"
-)
-
-type dummyExecuter struct{}
-
-func (d *dummyExecuter) Execute(_ context.Context, _ audit.Event) error {
- return nil
-}
-
-func TestSink(t *testing.T) {
- var s audit.Sink = &Sink{
- logger: zap.NewNop(),
- executer: &dummyExecuter{},
- }
-
- require.Equal(t, "cloud", s.String())
-
- err := s.SendAudits(context.TODO(), []audit.Event{
- {
- Version: "0.1",
- Type: string(flipt.SubjectFlag),
- Action: string(flipt.ActionCreate),
- },
- {
- Version: "0.1",
- Type: string(flipt.SubjectConstraint),
- Action: string(flipt.ActionUpdate),
- },
- })
-
- require.NoError(t, err)
- require.NoError(t, s.Close())
-}
diff --git a/internal/server/authn/server.go b/internal/server/authn/server.go
index 74aee16bac..735eaf2d46 100644
--- a/internal/server/authn/server.go
+++ b/internal/server/authn/server.go
@@ -99,13 +99,6 @@ type Server struct {
type Option func(*Server)
-// WithAuditLoggingEnabled sets the option for enabling audit logging for the auth server.
-func WithAuditLoggingEnabled(enabled bool) Option {
- return func(s *Server) {
- s.enableAuditLogging = enabled
- }
-}
-
func NewServer(logger *zap.Logger, store storageauth.Store, opts ...Option) *Server {
s := &Server{
logger: logger,
diff --git a/internal/server/authn/server_test.go b/internal/server/authn/server_test.go
index 2d0dc9b118..bc52aac443 100644
--- a/internal/server/authn/server_test.go
+++ b/internal/server/authn/server_test.go
@@ -64,7 +64,7 @@ func TestServer(t *testing.T) {
defer shutdown(t)
- rpcauth.RegisterAuthenticationServiceServer(server, authn.NewServer(logger, store, authn.WithAuditLoggingEnabled(true)))
+ rpcauth.RegisterAuthenticationServiceServer(server, authn.NewServer(logger, store))
go func() {
errC <- server.Serve(listener)
diff --git a/internal/server/middleware/grpc/middleware.go b/internal/server/middleware/grpc/middleware.go
index 3fb967dc24..d56e029976 100644
--- a/internal/server/middleware/grpc/middleware.go
+++ b/internal/server/middleware/grpc/middleware.go
@@ -237,7 +237,7 @@ func EvaluationUnaryInterceptor(analyticsEnabled bool) grpc.UnaryServerIntercept
}
// AuditEventUnaryInterceptor captures events and adds them to the trace span to be consumed downstream.
-func AuditEventUnaryInterceptor(logger *zap.Logger, eventPairChecker audit.EventPairChecker) grpc.UnaryServerInterceptor {
+func AuditEventUnaryInterceptor(logger *zap.Logger) grpc.UnaryServerInterceptor {
return func(ctx context.Context, req interface{}, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (interface{}, error) {
var requests []flipt.Request
r, ok := req.(flipt.Requester)
@@ -254,13 +254,9 @@ func AuditEventUnaryInterceptor(logger *zap.Logger, eventPairChecker audit.Event
defer func() {
for _, event := range events {
- eventPair := fmt.Sprintf("%s:%s", event.Type, event.Action)
- exists := eventPairChecker.Check(eventPair)
- if exists {
- span := trace.SpanFromContext(ctx)
- span.AddEvent("event", trace.WithAttributes(event.DecodeToAttributes()...))
- }
+ span := trace.SpanFromContext(ctx)
+ span.AddEvent("event", trace.WithAttributes(event.DecodeToAttributes()...))
}
}()
diff --git a/internal/server/middleware/grpc/middleware_test.go b/internal/server/middleware/grpc/middleware_test.go
index 1f3fab01ea..c3eedef769 100644
--- a/internal/server/middleware/grpc/middleware_test.go
+++ b/internal/server/middleware/grpc/middleware_test.go
@@ -358,19 +358,6 @@ func TestEvaluationUnaryInterceptor_BatchEvaluation(t *testing.T) {
// check that the requestID was propagated
assert.NotEmpty(t, resp.RequestId)
assert.Equal(t, "bar", resp.RequestId)
-
- // TODO(yquansah): flakey assertion
- // assert.NotZero(t, resp.RequestDurationMillis)
-}
-
-type checkerDummy struct{}
-
-func (c *checkerDummy) Check(e string) bool {
- return true
-}
-
-func (c *checkerDummy) Events() []string {
- return []string{"event"}
}
func TestAuditUnaryInterceptor_CreateFlag(t *testing.T) {
@@ -392,7 +379,7 @@ func TestAuditUnaryInterceptor_CreateFlag(t *testing.T) {
Description: req.Description,
}, nil)
- unaryInterceptor := AuditEventUnaryInterceptor(logger, &checkerDummy{})
+ unaryInterceptor := AuditEventUnaryInterceptor(logger)
handler := func(ctx context.Context, r interface{}) (interface{}, error) {
return s.CreateFlag(ctx, r.(*flipt.CreateFlagRequest))
@@ -438,7 +425,7 @@ func TestAuditUnaryInterceptor_UpdateFlag(t *testing.T) {
Enabled: req.Enabled,
}, nil)
- unaryInterceptor := AuditEventUnaryInterceptor(logger, &checkerDummy{})
+ unaryInterceptor := AuditEventUnaryInterceptor(logger)
handler := func(ctx context.Context, r interface{}) (interface{}, error) {
return s.UpdateFlag(ctx, r.(*flipt.UpdateFlagRequest))
@@ -476,7 +463,7 @@ func TestAuditUnaryInterceptor_DeleteFlag(t *testing.T) {
store.On("DeleteFlag", mock.Anything, req).Return(nil)
- unaryInterceptor := AuditEventUnaryInterceptor(logger, &checkerDummy{})
+ unaryInterceptor := AuditEventUnaryInterceptor(logger)
handler := func(ctx context.Context, r interface{}) (interface{}, error) {
return s.DeleteFlag(ctx, r.(*flipt.DeleteFlagRequest))
@@ -523,7 +510,7 @@ func TestAuditUnaryInterceptor_CreateVariant(t *testing.T) {
Attachment: req.Attachment,
}, nil)
- unaryInterceptor := AuditEventUnaryInterceptor(logger, &checkerDummy{})
+ unaryInterceptor := AuditEventUnaryInterceptor(logger)
handler := func(ctx context.Context, r interface{}) (interface{}, error) {
return s.CreateVariant(ctx, r.(*flipt.CreateVariantRequest))
@@ -571,7 +558,7 @@ func TestAuditUnaryInterceptor_UpdateVariant(t *testing.T) {
Attachment: req.Attachment,
}, nil)
- unaryInterceptor := AuditEventUnaryInterceptor(logger, &checkerDummy{})
+ unaryInterceptor := AuditEventUnaryInterceptor(logger)
handler := func(ctx context.Context, r interface{}) (interface{}, error) {
return s.UpdateVariant(ctx, r.(*flipt.UpdateVariantRequest))
@@ -608,7 +595,7 @@ func TestAuditUnaryInterceptor_DeleteVariant(t *testing.T) {
store.On("DeleteVariant", mock.Anything, req).Return(nil)
- unaryInterceptor := AuditEventUnaryInterceptor(logger, &checkerDummy{})
+ unaryInterceptor := AuditEventUnaryInterceptor(logger)
handler := func(ctx context.Context, r interface{}) (interface{}, error) {
return s.DeleteVariant(ctx, r.(*flipt.DeleteVariantRequest))
@@ -653,7 +640,7 @@ func TestAuditUnaryInterceptor_CreateDistribution(t *testing.T) {
Rollout: req.Rollout,
}, nil)
- unaryInterceptor := AuditEventUnaryInterceptor(logger, &checkerDummy{})
+ unaryInterceptor := AuditEventUnaryInterceptor(logger)
handler := func(ctx context.Context, r interface{}) (interface{}, error) {
return s.CreateDistribution(ctx, r.(*flipt.CreateDistributionRequest))
@@ -699,7 +686,7 @@ func TestAuditUnaryInterceptor_UpdateDistribution(t *testing.T) {
Rollout: req.Rollout,
}, nil)
- unaryInterceptor := AuditEventUnaryInterceptor(logger, &checkerDummy{})
+ unaryInterceptor := AuditEventUnaryInterceptor(logger)
handler := func(ctx context.Context, r interface{}) (interface{}, error) {
return s.UpdateDistribution(ctx, r.(*flipt.UpdateDistributionRequest))
@@ -739,7 +726,7 @@ func TestAuditUnaryInterceptor_DeleteDistribution(t *testing.T) {
store.On("DeleteDistribution", mock.Anything, req).Return(nil)
- unaryInterceptor := AuditEventUnaryInterceptor(logger, &checkerDummy{})
+ unaryInterceptor := AuditEventUnaryInterceptor(logger)
handler := func(ctx context.Context, r interface{}) (interface{}, error) {
return s.DeleteDistribution(ctx, r.(*flipt.DeleteDistributionRequest))
@@ -784,7 +771,7 @@ func TestAuditUnaryInterceptor_CreateSegment(t *testing.T) {
MatchType: req.MatchType,
}, nil)
- unaryInterceptor := AuditEventUnaryInterceptor(logger, &checkerDummy{})
+ unaryInterceptor := AuditEventUnaryInterceptor(logger)
handler := func(ctx context.Context, r interface{}) (interface{}, error) {
return s.CreateSegment(ctx, r.(*flipt.CreateSegmentRequest))
@@ -829,7 +816,7 @@ func TestAuditUnaryInterceptor_UpdateSegment(t *testing.T) {
MatchType: req.MatchType,
}, nil)
- unaryInterceptor := AuditEventUnaryInterceptor(logger, &checkerDummy{})
+ unaryInterceptor := AuditEventUnaryInterceptor(logger)
handler := func(ctx context.Context, r interface{}) (interface{}, error) {
return s.UpdateSegment(ctx, r.(*flipt.UpdateSegmentRequest))
@@ -866,7 +853,7 @@ func TestAuditUnaryInterceptor_DeleteSegment(t *testing.T) {
store.On("DeleteSegment", mock.Anything, req).Return(nil)
- unaryInterceptor := AuditEventUnaryInterceptor(logger, &checkerDummy{})
+ unaryInterceptor := AuditEventUnaryInterceptor(logger)
handler := func(ctx context.Context, r interface{}) (interface{}, error) {
return s.DeleteSegment(ctx, r.(*flipt.DeleteSegmentRequest))
@@ -914,7 +901,7 @@ func TestAuditUnaryInterceptor_CreateConstraint(t *testing.T) {
Value: req.Value,
}, nil)
- unaryInterceptor := AuditEventUnaryInterceptor(logger, &checkerDummy{})
+ unaryInterceptor := AuditEventUnaryInterceptor(logger)
handler := func(ctx context.Context, r interface{}) (interface{}, error) {
return s.CreateConstraint(ctx, r.(*flipt.CreateConstraintRequest))
@@ -963,7 +950,7 @@ func TestAuditUnaryInterceptor_UpdateConstraint(t *testing.T) {
Value: req.Value,
}, nil)
- unaryInterceptor := AuditEventUnaryInterceptor(logger, &checkerDummy{})
+ unaryInterceptor := AuditEventUnaryInterceptor(logger)
handler := func(ctx context.Context, r interface{}) (interface{}, error) {
return s.UpdateConstraint(ctx, r.(*flipt.UpdateConstraintRequest))
@@ -1001,7 +988,7 @@ func TestAuditUnaryInterceptor_DeleteConstraint(t *testing.T) {
store.On("DeleteConstraint", mock.Anything, req).Return(nil)
- unaryInterceptor := AuditEventUnaryInterceptor(logger, &checkerDummy{})
+ unaryInterceptor := AuditEventUnaryInterceptor(logger)
handler := func(ctx context.Context, r interface{}) (interface{}, error) {
return s.DeleteConstraint(ctx, r.(*flipt.DeleteConstraintRequest))
@@ -1050,7 +1037,7 @@ func TestAuditUnaryInterceptor_CreateRollout(t *testing.T) {
FlagKey: req.FlagKey,
}, nil)
- unaryInterceptor := AuditEventUnaryInterceptor(logger, &checkerDummy{})
+ unaryInterceptor := AuditEventUnaryInterceptor(logger)
handler := func(ctx context.Context, r interface{}) (interface{}, error) {
return s.CreateRollout(ctx, r.(*flipt.CreateRolloutRequest))
@@ -1092,7 +1079,7 @@ func TestAuditUnaryInterceptor_UpdateRollout(t *testing.T) {
Rank: 1,
}, nil)
- unaryInterceptor := AuditEventUnaryInterceptor(logger, &checkerDummy{})
+ unaryInterceptor := AuditEventUnaryInterceptor(logger)
handler := func(ctx context.Context, r interface{}) (interface{}, error) {
return s.UpdateRollout(ctx, r.(*flipt.UpdateRolloutRequest))
@@ -1131,7 +1118,7 @@ func TestAuditUnaryInterceptor_OrderRollout(t *testing.T) {
store.On("OrderRollouts", mock.Anything, req).Return(nil)
- unaryInterceptor := AuditEventUnaryInterceptor(logger, &checkerDummy{})
+ unaryInterceptor := AuditEventUnaryInterceptor(logger)
handler := func(ctx context.Context, r interface{}) (interface{}, error) {
return s.OrderRollouts(ctx, r.(*flipt.OrderRolloutsRequest))
@@ -1168,7 +1155,7 @@ func TestAuditUnaryInterceptor_DeleteRollout(t *testing.T) {
store.On("DeleteRollout", mock.Anything, req).Return(nil)
- unaryInterceptor := AuditEventUnaryInterceptor(logger, &checkerDummy{})
+ unaryInterceptor := AuditEventUnaryInterceptor(logger)
handler := func(ctx context.Context, r interface{}) (interface{}, error) {
return s.DeleteRollout(ctx, r.(*flipt.DeleteRolloutRequest))
@@ -1211,7 +1198,7 @@ func TestAuditUnaryInterceptor_CreateRule(t *testing.T) {
FlagKey: req.FlagKey,
}, nil)
- unaryInterceptor := AuditEventUnaryInterceptor(logger, &checkerDummy{})
+ unaryInterceptor := AuditEventUnaryInterceptor(logger)
handler := func(ctx context.Context, r interface{}) (interface{}, error) {
return s.CreateRule(ctx, r.(*flipt.CreateRuleRequest))
@@ -1254,7 +1241,7 @@ func TestAuditUnaryInterceptor_UpdateRule(t *testing.T) {
FlagKey: req.FlagKey,
}, nil)
- unaryInterceptor := AuditEventUnaryInterceptor(logger, &checkerDummy{})
+ unaryInterceptor := AuditEventUnaryInterceptor(logger)
handler := func(ctx context.Context, r interface{}) (interface{}, error) {
return s.UpdateRule(ctx, r.(*flipt.UpdateRuleRequest))
@@ -1292,7 +1279,7 @@ func TestAuditUnaryInterceptor_OrderRule(t *testing.T) {
store.On("OrderRules", mock.Anything, req).Return(nil)
- unaryInterceptor := AuditEventUnaryInterceptor(logger, &checkerDummy{})
+ unaryInterceptor := AuditEventUnaryInterceptor(logger)
handler := func(ctx context.Context, r interface{}) (interface{}, error) {
return s.OrderRules(ctx, r.(*flipt.OrderRulesRequest))
@@ -1330,7 +1317,7 @@ func TestAuditUnaryInterceptor_DeleteRule(t *testing.T) {
store.On("DeleteRule", mock.Anything, req).Return(nil)
- unaryInterceptor := AuditEventUnaryInterceptor(logger, &checkerDummy{})
+ unaryInterceptor := AuditEventUnaryInterceptor(logger)
handler := func(ctx context.Context, r interface{}) (interface{}, error) {
return s.DeleteRule(ctx, r.(*flipt.DeleteRuleRequest))
@@ -1371,7 +1358,7 @@ func TestAuditUnaryInterceptor_CreateNamespace(t *testing.T) {
Name: req.Name,
}, nil)
- unaryInterceptor := AuditEventUnaryInterceptor(logger, &checkerDummy{})
+ unaryInterceptor := AuditEventUnaryInterceptor(logger)
handler := func(ctx context.Context, r interface{}) (interface{}, error) {
return s.CreateNamespace(ctx, r.(*flipt.CreateNamespaceRequest))
@@ -1414,7 +1401,7 @@ func TestAuditUnaryInterceptor_UpdateNamespace(t *testing.T) {
Description: req.Description,
}, nil)
- unaryInterceptor := AuditEventUnaryInterceptor(logger, &checkerDummy{})
+ unaryInterceptor := AuditEventUnaryInterceptor(logger)
handler := func(ctx context.Context, r interface{}) (interface{}, error) {
return s.UpdateNamespace(ctx, r.(*flipt.UpdateNamespaceRequest))
@@ -1457,7 +1444,7 @@ func TestAuditUnaryInterceptor_DeleteNamespace(t *testing.T) {
store.On("DeleteNamespace", mock.Anything, req).Return(nil)
- unaryInterceptor := AuditEventUnaryInterceptor(logger, &checkerDummy{})
+ unaryInterceptor := AuditEventUnaryInterceptor(logger)
handler := func(ctx context.Context, r interface{}) (interface{}, error) {
return s.DeleteNamespace(ctx, r.(*flipt.DeleteNamespaceRequest))
@@ -1500,7 +1487,7 @@ func TestAuthMetadataAuditUnaryInterceptor(t *testing.T) {
Description: req.Description,
}, nil)
- unaryInterceptor := AuditEventUnaryInterceptor(logger, &checkerDummy{})
+ unaryInterceptor := AuditEventUnaryInterceptor(logger)
handler := func(ctx context.Context, r interface{}) (interface{}, error) {
return s.CreateFlag(ctx, r.(*flipt.CreateFlagRequest))
@@ -1554,7 +1541,7 @@ func TestAuditUnaryInterceptor_CreateToken(t *testing.T) {
"email": "example@flipt.io",
}}, nil)
- unaryInterceptor := AuditEventUnaryInterceptor(logger, &checkerDummy{})
+ unaryInterceptor := AuditEventUnaryInterceptor(logger)
handler := func(ctx context.Context, r interface{}) (interface{}, error) {
return s.CreateToken(ctx, r.(*authrpc.CreateTokenRequest))
diff --git a/internal/storage/authn/sql/store.go b/internal/storage/authn/sql/store.go
deleted file mode 100644
index ffbfeefed7..0000000000
--- a/internal/storage/authn/sql/store.go
+++ /dev/null
@@ -1,349 +0,0 @@
-package sql
-
-import (
- "context"
- "fmt"
- "strconv"
- "time"
-
- sq "github.com/Masterminds/squirrel"
-
- "github.com/google/uuid"
- "go.flipt.io/flipt/internal/storage"
- storageauth "go.flipt.io/flipt/internal/storage/authn"
- storagesql "go.flipt.io/flipt/internal/storage/sql"
- rpcauth "go.flipt.io/flipt/rpc/flipt/auth"
- "go.uber.org/zap"
- "google.golang.org/protobuf/types/known/timestamppb"
-)
-
-// Store is the persistent storage layer for Authentications backed by SQL
-// based relational database systems.
-type Store struct {
- logger *zap.Logger
- driver storagesql.Driver
- builder sq.StatementBuilderType
-
- now func() *timestamppb.Timestamp
-
- generateID func() string
- generateToken func() string
-}
-
-// Option is a type which configures a *Store
-type Option func(*Store)
-
-// NewStore constructs and configures a new instance of *Store.
-// Queries are issued to the database via the provided statement builder.
-func NewStore(driver storagesql.Driver, builder sq.StatementBuilderType, logger *zap.Logger, opts ...Option) *Store {
- store := &Store{
- logger: logger,
- driver: driver,
- builder: builder,
- now: func() *timestamppb.Timestamp {
- // we truncate timestamps to the microsecond to support Postgres/MySQL
- // the lowest common denominators in terms of timestamp precision
- now := time.Now().UTC().Truncate(time.Microsecond)
- return timestamppb.New(now)
- },
- generateID: uuid.NewString,
- generateToken: storageauth.GenerateRandomToken,
- }
-
- for _, opt := range opts {
- opt(store)
- }
-
- return store
-}
-
-// WithNowFunc overrides the stores now() function used to obtain
-// a protobuf timestamp representative of the current time of evaluation.
-func WithNowFunc(fn func() *timestamppb.Timestamp) Option {
- return func(s *Store) {
- s.now = fn
- }
-}
-
-// WithTokenGeneratorFunc overrides the stores token generator function
-// used to generate new random token strings as client tokens, when
-// creating new instances of Authentication.
-// The default is a pseudo-random string of bytes base64 encoded.
-func WithTokenGeneratorFunc(fn func() string) Option {
- return func(s *Store) {
- s.generateToken = fn
- }
-}
-
-// WithIDGeneratorFunc overrides the stores ID generator function
-// used to generate new random ID strings, when creating new instances
-// of Authentications.
-// The default is a string containing a valid UUID (V4).
-func WithIDGeneratorFunc(fn func() string) Option {
- return func(s *Store) {
- s.generateID = fn
- }
-}
-
-// CreateAuthentication creates and persists an instance of an Authentication.
-func (s *Store) CreateAuthentication(ctx context.Context, r *storageauth.CreateAuthenticationRequest) (string, *rpcauth.Authentication, error) {
- var (
- now = s.now()
- clientToken = r.ClientToken
- authentication = rpcauth.Authentication{
- Id: s.generateID(),
- Method: r.Method,
- Metadata: r.Metadata,
- ExpiresAt: r.ExpiresAt,
- CreatedAt: now,
- UpdatedAt: now,
- }
- )
-
- // if no client token is provided, generate a new one
- if clientToken == "" {
- clientToken = s.generateToken()
- }
-
- hashedToken, err := storageauth.HashClientToken(clientToken)
- if err != nil {
- return "", nil, fmt.Errorf("creating authentication: %w", err)
- }
-
- if _, err := s.builder.Insert("authentications").
- Columns(
- "id",
- "hashed_client_token",
- "method",
- "metadata",
- "expires_at",
- "created_at",
- "updated_at",
- ).
- Values(
- &authentication.Id,
- &hashedToken,
- &authentication.Method,
- &storagesql.JSONField[map[string]string]{T: authentication.Metadata},
- &storagesql.NullableTimestamp{Timestamp: authentication.ExpiresAt},
- &storagesql.Timestamp{Timestamp: authentication.CreatedAt},
- &storagesql.Timestamp{Timestamp: authentication.UpdatedAt},
- ).
- ExecContext(ctx); err != nil {
- return "", nil, fmt.Errorf(
- "inserting authentication %q: %w",
- authentication.Id,
- s.driver.AdaptError(err),
- )
- }
-
- return clientToken, &authentication, nil
-}
-
-// GetAuthenticationByClientToken fetches the associated Authentication for the provided clientToken string.
-//
-// Given a row is present for the hash of the clientToken then materialize into an Authentication.
-// Else, given it cannot be located, a storage.ErrNotFound error is wrapped and returned instead.
-func (s *Store) GetAuthenticationByClientToken(ctx context.Context, clientToken string) (*rpcauth.Authentication, error) {
- hashedToken, err := storageauth.HashClientToken(clientToken)
- if err != nil {
- return nil, fmt.Errorf("getting authentication by token: %w", err)
- }
-
- var authentication rpcauth.Authentication
-
- if err := s.scanAuthentication(
- s.builder.
- Select(
- "id",
- "method",
- "metadata",
- "expires_at",
- "created_at",
- "updated_at",
- ).
- From("authentications").
- Where(sq.Eq{"hashed_client_token": hashedToken}).
- QueryRowContext(ctx), &authentication); err != nil {
- return nil, fmt.Errorf(
- "getting authentication by token: %w",
- s.driver.AdaptError(err),
- )
- }
-
- return &authentication, nil
-}
-
-// GetAuthenticationByID retrieves an instance of Authentication from the backing
-// store using the provided id string.
-func (s *Store) GetAuthenticationByID(ctx context.Context, id string) (*rpcauth.Authentication, error) {
- var authentication rpcauth.Authentication
-
- if err := s.scanAuthentication(
- s.builder.
- Select(
- "id",
- "method",
- "metadata",
- "expires_at",
- "created_at",
- "updated_at",
- ).
- From("authentications").
- Where(sq.Eq{"id": id}).
- QueryRowContext(ctx), &authentication); err != nil {
- return nil, fmt.Errorf(
- "getting authentication by token: %w",
- s.driver.AdaptError(err),
- )
- }
-
- return &authentication, nil
-}
-
-// ListAuthentications lists a page of Authentications from the backing store.
-func (s *Store) ListAuthentications(ctx context.Context, req *storage.ListRequest[storageauth.ListAuthenticationsPredicate]) (set storage.ResultSet[*rpcauth.Authentication], err error) {
- defer func() {
- if err != nil {
- err = fmt.Errorf(
- "listing authentications: %w",
- s.driver.AdaptError(err),
- )
- }
- }()
-
- // adjust the query parameters within normal bounds
- req.QueryParams.Normalize()
-
- query := s.builder.
- Select(
- "id",
- "method",
- "metadata",
- "expires_at",
- "created_at",
- "updated_at",
- ).
- From("authentications").
- Limit(req.QueryParams.Limit + 1).
- OrderBy(fmt.Sprintf("created_at %s", req.QueryParams.Order))
-
- if req.Predicate.Method != nil {
- query = query.Where(sq.Eq{"method": *req.Predicate.Method})
- }
-
- var offset int
- if v, err := strconv.ParseInt(req.QueryParams.PageToken, 10, 64); err == nil {
- offset = int(v)
- query = query.Offset(uint64(v))
- }
-
- rows, err := query.QueryContext(ctx)
- if err != nil {
- return set, err
- }
-
- defer func() { _ = rows.Close() }()
-
- for rows.Next() {
- var authentication rpcauth.Authentication
- if err = s.scanAuthentication(rows, &authentication); err != nil {
- return
- }
-
- if len(set.Results) >= int(req.QueryParams.Limit) {
- // set the next page token to the first
- // row beyond the query limit and break
- set.NextPageToken = fmt.Sprintf("%d", offset+int(req.QueryParams.Limit))
- break
- }
-
- set.Results = append(set.Results, &authentication)
- }
-
- if err = rows.Err(); err != nil {
- return
- }
-
- return
-}
-
-func (s *Store) adaptError(fmtStr string, err *error) {
- if *err != nil {
- *err = fmt.Errorf(fmtStr, s.driver.AdaptError(*err))
- }
-}
-
-// DeleteAuthentications attempts to delete one or more Authentication instances from the backing store.
-// Use auth.DeleteByID to construct a request to delete a single Authentication by ID string.
-// Use auth.DeleteByMethod to construct a request to delete 0 or more Authentications by Method and optional expired before constraint.
-func (s *Store) DeleteAuthentications(ctx context.Context, req *storageauth.DeleteAuthenticationsRequest) (err error) {
- defer s.adaptError("deleting authentications: %w", &err)
-
- if err := req.Valid(); err != nil {
- return err
- }
-
- query := s.builder.
- Delete("authentications")
-
- if req.ID != nil {
- query = query.Where(sq.Eq{"id": req.ID})
- }
-
- if req.Method != nil {
- query = query.Where(sq.Eq{"method": req.Method})
- }
-
- if req.ExpiredBefore != nil {
- query = query.Where(sq.Lt{
- "expires_at": &storagesql.Timestamp{Timestamp: req.ExpiredBefore},
- })
- }
-
- _, err = query.ExecContext(ctx)
-
- return
-}
-
-// ExpireAuthenticationByID attempts to expire an Authentication by ID string and the provided expiry time.
-func (s *Store) ExpireAuthenticationByID(ctx context.Context, id string, expireAt *timestamppb.Timestamp) (err error) {
- defer s.adaptError("expiring authentication by id: %w", &err)
-
- _, err = s.builder.
- Update("authentications").
- Set("expires_at", &storagesql.Timestamp{Timestamp: expireAt}).
- Where(sq.Eq{"id": id}).
- ExecContext(ctx)
-
- return
-}
-
-func (s *Store) scanAuthentication(scanner sq.RowScanner, authentication *rpcauth.Authentication) error {
- var (
- expiresAt storagesql.NullableTimestamp
- createdAt storagesql.Timestamp
- updatedAt storagesql.Timestamp
- )
-
- if err := scanner.
- Scan(
- &authentication.Id,
- &authentication.Method,
- &storagesql.JSONField[*map[string]string]{T: &authentication.Metadata},
- &expiresAt,
- &createdAt,
- &updatedAt,
- ); err != nil {
- return fmt.Errorf(
- "reading authentication: %w",
- s.driver.AdaptError(err),
- )
- }
-
- authentication.ExpiresAt = expiresAt.Timestamp
- authentication.CreatedAt = createdAt.Timestamp
- authentication.UpdatedAt = updatedAt.Timestamp
-
- return nil
-}
diff --git a/internal/storage/authn/sql/store_test.go b/internal/storage/authn/sql/store_test.go
deleted file mode 100644
index db744f56a8..0000000000
--- a/internal/storage/authn/sql/store_test.go
+++ /dev/null
@@ -1,415 +0,0 @@
-package sql
-
-import (
- "context"
- "fmt"
- "testing"
- "time"
-
- "github.com/stretchr/testify/assert"
- "github.com/stretchr/testify/require"
- "go.flipt.io/flipt/errors"
- "go.flipt.io/flipt/internal/storage"
- storageauth "go.flipt.io/flipt/internal/storage/authn"
- authtesting "go.flipt.io/flipt/internal/storage/authn/testing"
- storagesql "go.flipt.io/flipt/internal/storage/sql"
- sqltesting "go.flipt.io/flipt/internal/storage/sql/testing"
- rpcauth "go.flipt.io/flipt/rpc/flipt/auth"
- "go.uber.org/zap"
- "go.uber.org/zap/zaptest"
- "google.golang.org/protobuf/types/known/timestamppb"
-)
-
-var (
- someTimestamp = timestamppb.New(time.Date(2022, 10, 25, 18, 0, 0, 0, time.UTC))
- commonOpts = func(t *testing.T) []Option {
- return []Option{
- WithNowFunc(func() *timestamppb.Timestamp {
- return someTimestamp
- }),
- // tokens created will be "token:"
- WithTokenGeneratorFunc(newStaticGenerator(t, "token")),
- // ids created will be "id:"
- WithIDGeneratorFunc(newStaticGenerator(t, "id")),
- }
- }
-)
-
-func TestAuthenticationStoreHarness(t *testing.T) {
- authtesting.TestAuthenticationStoreHarness(t, func(t *testing.T) storageauth.Store {
- return newTestStore(t)()
- })
-}
-
-func TestAuthentication_CreateAuthentication(t *testing.T) {
- // established a store factory with a single seeded auth entry
- storeFn := newTestStore(t, createAuth("create_auth_id", "create_auth_token", rpcauth.Method_METHOD_TOKEN))
-
- ctx := context.TODO()
- for _, test := range []struct {
- name string
- opts func(t *testing.T) []Option
- req *storageauth.CreateAuthenticationRequest
- expectedErrAs error
- expectedToken string
- expectedAuthentication *rpcauth.Authentication
- }{
- {
- name: "successfully creates authentication",
- opts: commonOpts,
- req: &storageauth.CreateAuthenticationRequest{
- Method: rpcauth.Method_METHOD_TOKEN,
- ExpiresAt: timestamppb.New(time.Unix(2, 0)),
- Metadata: map[string]string{
- "io.flipt.auth.token.name": "access_all_areas",
- "io.flipt.auth.token.description": "The keys to the castle",
- },
- },
- expectedToken: "token:TestAuthentication_CreateAuthentication/successfully_creates_authentication",
- expectedAuthentication: &rpcauth.Authentication{
- Id: "id:TestAuthentication_CreateAuthentication/successfully_creates_authentication",
- Method: rpcauth.Method_METHOD_TOKEN,
- Metadata: map[string]string{
- "io.flipt.auth.token.name": "access_all_areas",
- "io.flipt.auth.token.description": "The keys to the castle",
- },
- ExpiresAt: timestamppb.New(time.Unix(2, 0)),
- CreatedAt: someTimestamp,
- UpdatedAt: someTimestamp,
- },
- },
- {
- name: "successfully creates authentication (no expiration)",
- opts: commonOpts,
- req: &storageauth.CreateAuthenticationRequest{
- Method: rpcauth.Method_METHOD_TOKEN,
- Metadata: map[string]string{
- "io.flipt.auth.token.name": "access_all_areas",
- "io.flipt.auth.token.description": "The keys to the castle",
- },
- },
- expectedToken: "token:TestAuthentication_CreateAuthentication/successfully_creates_authentication_(no_expiration)",
- expectedAuthentication: &rpcauth.Authentication{
- Id: "id:TestAuthentication_CreateAuthentication/successfully_creates_authentication_(no_expiration)",
- Method: rpcauth.Method_METHOD_TOKEN,
- Metadata: map[string]string{
- "io.flipt.auth.token.name": "access_all_areas",
- "io.flipt.auth.token.description": "The keys to the castle",
- },
- CreatedAt: someTimestamp,
- UpdatedAt: someTimestamp,
- },
- },
- {
- name: "fails ID uniqueness constraint",
- opts: func(t *testing.T) []Option {
- return []Option{
- WithIDGeneratorFunc(func() string {
- // return previous tests created ID
- return "create_auth_id"
- }),
- }
- },
- req: &storageauth.CreateAuthenticationRequest{
- Method: rpcauth.Method_METHOD_TOKEN,
- ExpiresAt: timestamppb.New(time.Unix(2, 0)),
- Metadata: map[string]string{
- "io.flipt.auth.token.name": "access_all_areas",
- "io.flipt.auth.token.description": "The keys to the castle",
- },
- },
- expectedErrAs: errPtr(errors.ErrInvalid("")),
- },
- {
- name: "fails token uniqueness constraint",
- opts: func(t *testing.T) []Option {
- return []Option{
- WithTokenGeneratorFunc(func() string {
- // return previous tests created token
- return "create_auth_token"
- }),
- }
- },
- req: &storageauth.CreateAuthenticationRequest{
- Method: rpcauth.Method_METHOD_TOKEN,
- ExpiresAt: timestamppb.New(time.Unix(2, 0)),
- Metadata: map[string]string{
- "io.flipt.auth.token.name": "access_all_areas",
- "io.flipt.auth.token.description": "The keys to the castle",
- },
- },
- expectedErrAs: errPtr(errors.ErrInvalid("")),
- },
- } {
- test := test
- t.Run(test.name, func(t *testing.T) {
- store := storeFn(test.opts(t)...)
-
- clientToken, created, err := store.CreateAuthentication(ctx, test.req)
- if test.expectedErrAs != nil {
- // nolint:testifylint
- require.ErrorAs(t, err, test.expectedErrAs)
- return
- }
-
- require.NoError(t, err)
- assert.Equal(t, test.expectedToken, clientToken)
- assert.Equal(t, test.expectedAuthentication, created)
- })
- }
-}
-
-func TestAuthentication_GetAuthenticationByClientToken(t *testing.T) {
- // seed database state
- ctx := context.TODO()
-
- // established a store factory with a single seeded auth entry
- storeFn := newTestStore(t, createAuth("get_auth_id", "get_auth_token", rpcauth.Method_METHOD_TOKEN))
-
- // run table tests
- for _, test := range []struct {
- name string
- clientToken string
- expectedErrAs error
- expectedAuthentication *rpcauth.Authentication
- }{
- {
- name: "error not found for unexpected clientToken",
- clientToken: "unknown",
- expectedErrAs: errPtr(errors.ErrNotFound("")),
- },
- {
- name: "successfully retrieves authentication by clientToken",
- clientToken: "get_auth_token",
- expectedAuthentication: &rpcauth.Authentication{
- Id: "get_auth_id",
- Method: rpcauth.Method_METHOD_TOKEN,
- Metadata: map[string]string{
- "io.flipt.auth.token.name": "access_some_areas",
- "io.flipt.auth.token.description": "The keys to some of the castle",
- },
- ExpiresAt: timestamppb.New(time.Unix(2, 0)),
- CreatedAt: someTimestamp,
- UpdatedAt: someTimestamp,
- },
- },
- } {
- var (
- clientToken = test.clientToken
- expectedErrAs = test.expectedErrAs
- expectedAuthentication = test.expectedAuthentication
- )
-
- t.Run(test.name, func(t *testing.T) {
- retrieved, err := storeFn(commonOpts(t)...).GetAuthenticationByClientToken(ctx, clientToken)
- if expectedErrAs != nil {
- // nolint:testifylint
- require.ErrorAs(t, err, expectedErrAs)
- return
- }
-
- require.NoError(t, err)
- assert.Equal(t, expectedAuthentication, retrieved)
- })
- }
-}
-
-func TestAuthentication_ListAuthentications_ByMethod(t *testing.T) {
- ctx := context.TODO()
-
- // increment each timestamp by 1 when seeding auths
- var i int64
- seedOpts := func(t *testing.T) []Option {
- return []Option{WithNowFunc(func() *timestamppb.Timestamp {
- i++
- return timestamppb.New(time.Unix(i, 0))
- })}
- }
-
- storeFn := newTestStore(t,
- createAuth("none_id_one", "none_client_token_one", rpcauth.Method_METHOD_NONE, withOpts(seedOpts)),
- createAuth("none_id_two", "none_client_token_two", rpcauth.Method_METHOD_NONE, withOpts(seedOpts)),
- createAuth("none_id_three", "none_client_token_three", rpcauth.Method_METHOD_NONE, withOpts(seedOpts)),
- createAuth("token_id_one", "token_client_token_one", rpcauth.Method_METHOD_TOKEN, withOpts(seedOpts)),
- createAuth("token_id_two", "token_client_token_two", rpcauth.Method_METHOD_TOKEN, withOpts(seedOpts)),
- createAuth("token_id_three", "token_client_token_three", rpcauth.Method_METHOD_TOKEN, withOpts(seedOpts)),
- )
-
- t.Run("method == none", func(t *testing.T) {
- // list predicated with none auth method
- req := storage.ListWithOptions(storageauth.ListMethod(rpcauth.Method_METHOD_NONE))
- noneMethod, err := storeFn().ListAuthentications(ctx, req)
-
- require.NoError(t, err)
- assert.Equal(t, storage.ResultSet[*rpcauth.Authentication]{
- Results: []*rpcauth.Authentication{
- {
- Id: "none_id_one",
- Method: rpcauth.Method_METHOD_NONE,
- Metadata: map[string]string{
- "io.flipt.auth.token.name": "access_some_areas",
- "io.flipt.auth.token.description": "The keys to some of the castle",
- },
- ExpiresAt: timestamppb.New(time.Unix(2, 0)),
- CreatedAt: timestamppb.New(time.Unix(1, 0)),
- UpdatedAt: timestamppb.New(time.Unix(1, 0)),
- },
- {
- Id: "none_id_two",
- Method: rpcauth.Method_METHOD_NONE,
- Metadata: map[string]string{
- "io.flipt.auth.token.name": "access_some_areas",
- "io.flipt.auth.token.description": "The keys to some of the castle",
- },
- ExpiresAt: timestamppb.New(time.Unix(2, 0)),
- CreatedAt: timestamppb.New(time.Unix(2, 0)),
- UpdatedAt: timestamppb.New(time.Unix(2, 0)),
- },
- {
- Id: "none_id_three",
- Method: rpcauth.Method_METHOD_NONE,
- Metadata: map[string]string{
- "io.flipt.auth.token.name": "access_some_areas",
- "io.flipt.auth.token.description": "The keys to some of the castle",
- },
- ExpiresAt: timestamppb.New(time.Unix(2, 0)),
- CreatedAt: timestamppb.New(time.Unix(3, 0)),
- UpdatedAt: timestamppb.New(time.Unix(3, 0)),
- },
- },
- }, noneMethod)
- })
-
- t.Run("method == token", func(t *testing.T) {
- // list predicated with token auth method
- req := storage.ListWithOptions(storageauth.ListMethod(rpcauth.Method_METHOD_TOKEN))
- tokenMethod, err := storeFn().ListAuthentications(ctx, req)
- require.NoError(t, err)
- assert.Equal(t, storage.ResultSet[*rpcauth.Authentication]{
- Results: []*rpcauth.Authentication{
- {
- Id: "token_id_one",
- Method: rpcauth.Method_METHOD_TOKEN,
- Metadata: map[string]string{
- "io.flipt.auth.token.name": "access_some_areas",
- "io.flipt.auth.token.description": "The keys to some of the castle",
- },
- ExpiresAt: timestamppb.New(time.Unix(2, 0)),
- CreatedAt: timestamppb.New(time.Unix(4, 0)),
- UpdatedAt: timestamppb.New(time.Unix(4, 0)),
- },
- {
- Id: "token_id_two",
- Method: rpcauth.Method_METHOD_TOKEN,
- Metadata: map[string]string{
- "io.flipt.auth.token.name": "access_some_areas",
- "io.flipt.auth.token.description": "The keys to some of the castle",
- },
- ExpiresAt: timestamppb.New(time.Unix(2, 0)),
- CreatedAt: timestamppb.New(time.Unix(5, 0)),
- UpdatedAt: timestamppb.New(time.Unix(5, 0)),
- },
- {
- Id: "token_id_three",
- Method: rpcauth.Method_METHOD_TOKEN,
- Metadata: map[string]string{
- "io.flipt.auth.token.name": "access_some_areas",
- "io.flipt.auth.token.description": "The keys to some of the castle",
- },
- ExpiresAt: timestamppb.New(time.Unix(2, 0)),
- CreatedAt: timestamppb.New(time.Unix(6, 0)),
- UpdatedAt: timestamppb.New(time.Unix(6, 0)),
- },
- },
- }, tokenMethod)
- })
-}
-
-type authentication struct {
- id string
- token string
- method rpcauth.Method
- optFn func(t *testing.T) []Option
-}
-
-func withOpts(optFn func(t *testing.T) []Option) func(*authentication) {
- return func(a *authentication) {
- a.optFn = optFn
- }
-}
-
-func createAuth(id, token string, method rpcauth.Method, opts ...func(*authentication)) authentication {
- a := authentication{id, token, method, nil}
- for _, opt := range opts {
- opt(&a)
- }
-
- return a
-}
-
-func newTestStore(t *testing.T, seed ...authentication) func(...Option) *Store {
- t.Helper()
-
- db, err := sqltesting.Open()
- if err != nil {
- t.Fatal(err)
- }
-
- var (
- ctx = context.TODO()
- logger = zaptest.NewLogger(t)
- storeFn = func(opts ...Option) *Store {
- return NewStore(
- db.Driver,
- storagesql.BuilderFor(db.DB, db.Driver, true),
- logger,
- opts...,
- )
- }
- )
-
- // seed any authentication fixtures
- for _, a := range seed {
- a := a
- opts := []Option{
- WithNowFunc(func() *timestamppb.Timestamp {
- return someTimestamp
- }),
- WithTokenGeneratorFunc(func() string { return a.token }),
- WithIDGeneratorFunc(func() string { return a.id }),
- }
-
- if a.optFn != nil {
- opts = append(opts, a.optFn(t)...)
- }
-
- clientToken, _, err := storeFn(opts...).CreateAuthentication(ctx, &storageauth.CreateAuthenticationRequest{
- Method: a.method,
- ExpiresAt: timestamppb.New(time.Unix(2, 0)),
- Metadata: map[string]string{
- "io.flipt.auth.token.name": "access_some_areas",
- "io.flipt.auth.token.description": "The keys to some of the castle",
- },
- })
- require.NoError(t, err)
- require.Equal(t, a.token, clientToken)
-
- logger.Debug("seeded authentication", zap.String("id", a.id))
-
- time.Sleep(10 * time.Millisecond)
- }
-
- return storeFn
-}
-
-func newStaticGenerator(t *testing.T, purpose string) func() string {
- t.Helper()
-
- return func() string {
- return fmt.Sprintf("%s:%s", purpose, t.Name())
- }
-}
-
-func errPtr[E error](e E) *E {
- return &e
-}
diff --git a/internal/storage/fs/oci/store.go b/internal/storage/fs/oci/store.go
deleted file mode 100644
index 12f1c56377..0000000000
--- a/internal/storage/fs/oci/store.go
+++ /dev/null
@@ -1,103 +0,0 @@
-package oci
-
-import (
- "context"
- "sync"
-
- "github.com/opencontainers/go-digest"
- "go.flipt.io/flipt/internal/containers"
- "go.flipt.io/flipt/internal/oci"
- "go.flipt.io/flipt/internal/storage"
- storagefs "go.flipt.io/flipt/internal/storage/fs"
- "go.uber.org/zap"
-)
-
-var _ storagefs.SnapshotStore = (*SnapshotStore)(nil)
-
-// SnapshotStore is an implementation storage.SnapshotStore backed by OCI repositories.
-// It fetches instances of OCI manifests and uses them to build snapshots from their contents.
-type SnapshotStore struct {
- *storagefs.Poller
-
- logger *zap.Logger
-
- store *oci.Store
- ref oci.Reference
-
- mu sync.RWMutex
- snap storage.ReadOnlyStore
- lastDigest digest.Digest
-
- pollOpts []containers.Option[storagefs.Poller]
-}
-
-// View accepts a function which takes a *StoreSnapshot.
-// The SnapshotStore will supply a snapshot which is valid
-// for the lifetime of the provided function call.
-func (s *SnapshotStore) View(_ context.Context, fn func(storage.ReadOnlyStore) error) error {
- s.mu.RLock()
- defer s.mu.RUnlock()
- return fn(s.snap)
-}
-
-// NewSnapshotStore constructs and configures a Store.
-// The store uses the connection and credential details provided to build
-// *storagefs.StoreSnapshot implementations around a target OCI repository.
-func NewSnapshotStore(ctx context.Context, logger *zap.Logger, store *oci.Store, ref oci.Reference, opts ...containers.Option[SnapshotStore]) (_ *SnapshotStore, err error) {
- s := &SnapshotStore{
- logger: logger,
- store: store,
- ref: ref,
- }
-
- containers.ApplyAll(s, opts...)
-
- if _, err := s.update(ctx); err != nil {
- return nil, err
- }
-
- s.Poller = storagefs.NewPoller(logger, ctx, s.update, s.pollOpts...)
-
- go s.Poller.Poll()
-
- return s, nil
-}
-
-// WithPollOptions configures the options used periodically invoke the update procedure
-func WithPollOptions(opts ...containers.Option[storagefs.Poller]) containers.Option[SnapshotStore] {
- return func(s *SnapshotStore) {
- s.pollOpts = append(s.pollOpts, opts...)
- }
-}
-
-func (s *SnapshotStore) String() string {
- return "oci"
-}
-
-// update attempts to fetch the latest state for the target OCi repository and tag.
-// If the state has not change sinced the last observed image digest it skips
-// updating the snapshot and returns false (not modified).
-func (s *SnapshotStore) update(ctx context.Context) (bool, error) {
- resp, err := s.store.Fetch(ctx, s.ref, oci.IfNoMatch(s.lastDigest))
- if err != nil {
- return false, err
- }
-
- // return not modified as the last observed digest matched
- // the remote digest
- if resp.Matched {
- return false, nil
- }
-
- snap, err := storagefs.SnapshotFromFiles(s.logger, resp.Files, storagefs.WithEtag(resp.Digest.Hex()))
- if err != nil {
- return false, err
- }
-
- s.mu.Lock()
- s.lastDigest = resp.Digest
- s.snap = snap
- s.mu.Unlock()
-
- return true, nil
-}
diff --git a/internal/storage/fs/oci/store_test.go b/internal/storage/fs/oci/store_test.go
deleted file mode 100644
index 837131751f..0000000000
--- a/internal/storage/fs/oci/store_test.go
+++ /dev/null
@@ -1,159 +0,0 @@
-package oci
-
-import (
- "bytes"
- "context"
- "fmt"
- "path"
- "testing"
- "time"
-
- "github.com/opencontainers/go-digest"
- v1 "github.com/opencontainers/image-spec/specs-go/v1"
- "github.com/stretchr/testify/require"
- "go.flipt.io/flipt/internal/containers"
- fliptoci "go.flipt.io/flipt/internal/oci"
- "go.flipt.io/flipt/internal/storage"
- "go.flipt.io/flipt/internal/storage/fs"
- "go.uber.org/zap/zaptest"
- "oras.land/oras-go/v2"
- "oras.land/oras-go/v2/content/oci"
-)
-
-func Test_SourceString(t *testing.T) {
- require.Equal(t, "oci", (&SnapshotStore{}).String())
-}
-
-func Test_SourceSubscribe(t *testing.T) {
- ch := make(chan struct{})
- store, target := testStore(t, WithPollOptions(
- fs.WithInterval(time.Second),
- fs.WithNotify(t, func(modified bool) {
- if modified {
- close(ch)
- }
- }),
- ))
-
- ctx := context.Background()
-
- require.NoError(t, store.View(ctx, func(s storage.ReadOnlyStore) error {
- _, err := s.GetNamespace(ctx, storage.NewNamespace("production"))
- require.NoError(t, err)
-
- _, err = s.GetFlag(ctx, storage.NewResource("production", "foo"))
- require.Error(t, err, "should error as flag should not exist yet")
-
- return nil
- }))
-
- updateRepoContents(t, target,
- layer(
- "production",
- `{"namespace":"production","flags":[{"key":"foo","name":"Foo"}]}`,
- fliptoci.MediaTypeFliptNamespace,
- ),
- )
-
- t.Log("waiting for new snapshot")
-
- // assert matching state
- select {
- case <-ch:
- case <-time.After(time.Minute):
- t.Fatal("timed out waiting for snapshot")
- }
-
- t.Log("received new snapshot")
-
- require.NoError(t, store.View(ctx, func(s storage.ReadOnlyStore) error {
- _, err := s.GetFlag(ctx, storage.NewResource("production", "foo"))
- require.NoError(t, err)
- return nil
- }))
-}
-
-func testStore(t *testing.T, opts ...containers.Option[SnapshotStore]) (*SnapshotStore, oras.Target) {
- t.Helper()
-
- target, dir, repo := testRepository(t,
- layer("production", `{"namespace":"production"}`, fliptoci.MediaTypeFliptNamespace),
- )
-
- store, err := fliptoci.NewStore(zaptest.NewLogger(t), dir)
- require.NoError(t, err)
-
- ref, err := fliptoci.ParseReference(fmt.Sprintf("flipt://local/%s:latest", repo))
- require.NoError(t, err)
-
- ctx, cancel := context.WithCancel(context.Background())
- t.Cleanup(cancel)
-
- source, err := NewSnapshotStore(ctx,
- zaptest.NewLogger(t),
- store,
- ref,
- opts...)
- require.NoError(t, err)
-
- t.Cleanup(func() {
- _ = source.Close()
- })
-
- return source, target
-}
-
-func layer(ns, payload, mediaType string) func(*testing.T, oras.Target) v1.Descriptor {
- return func(t *testing.T, store oras.Target) v1.Descriptor {
- t.Helper()
-
- desc := v1.Descriptor{
- Digest: digest.FromString(payload),
- Size: int64(len(payload)),
- MediaType: mediaType,
- Annotations: map[string]string{
- fliptoci.AnnotationFliptNamespace: ns,
- },
- }
-
- require.NoError(t, store.Push(context.TODO(), desc, bytes.NewReader([]byte(payload))))
-
- return desc
- }
-}
-
-func testRepository(t *testing.T, layerFuncs ...func(*testing.T, oras.Target) v1.Descriptor) (oras.Target, string, string) {
- t.Helper()
-
- var (
- repository = "testrepo"
- dir = t.TempDir()
- )
-
- store, err := oci.New(path.Join(dir, repository))
- require.NoError(t, err)
-
- store.AutoSaveIndex = true
-
- updateRepoContents(t, store, layerFuncs...)
-
- return store, dir, repository
-}
-
-func updateRepoContents(t *testing.T, target oras.Target, layerFuncs ...func(*testing.T, oras.Target) v1.Descriptor) {
- t.Helper()
- ctx := context.TODO()
-
- var layers []v1.Descriptor
- for _, fn := range layerFuncs {
- layers = append(layers, fn(t, target))
- }
-
- desc, err := oras.PackManifest(ctx, target, oras.PackManifestVersion1_1_RC4, fliptoci.MediaTypeFliptFeatures, oras.PackManifestOptions{
- ManifestAnnotations: map[string]string{},
- Layers: layers,
- })
- require.NoError(t, err)
-
- require.NoError(t, target.Tag(ctx, desc, "latest"))
-}
diff --git a/internal/storage/fs/store/store.go b/internal/storage/fs/store/store.go
index adce939ed7..1d3d097679 100644
--- a/internal/storage/fs/store/store.go
+++ b/internal/storage/fs/store/store.go
@@ -7,19 +7,15 @@ import (
"os"
"strconv"
- "oras.land/oras-go/v2"
-
"github.com/go-git/go-git/v5/plumbing/transport/http"
gitssh "github.com/go-git/go-git/v5/plumbing/transport/ssh"
"go.flipt.io/flipt/internal/config"
"go.flipt.io/flipt/internal/containers"
- "go.flipt.io/flipt/internal/oci"
"go.flipt.io/flipt/internal/storage"
storagefs "go.flipt.io/flipt/internal/storage/fs"
"go.flipt.io/flipt/internal/storage/fs/git"
"go.flipt.io/flipt/internal/storage/fs/local"
"go.flipt.io/flipt/internal/storage/fs/object"
- storageoci "go.flipt.io/flipt/internal/storage/fs/oci"
"go.uber.org/zap"
"gocloud.dev/blob"
"gocloud.dev/blob/azureblob"
@@ -128,45 +124,6 @@ func NewStore(ctx context.Context, logger *zap.Logger, cfg *config.Config) (_ st
return storagefs.NewStore(storagefs.NewSingleReferenceStore(logger, snapStore)), nil
case config.ObjectStorageType:
return newObjectStore(ctx, cfg, logger)
- case config.OCIStorageType:
- var opts []containers.Option[oci.StoreOptions]
- if auth := cfg.Storage.OCI.Authentication; auth != nil {
- opt, err := oci.WithCredentials(
- auth.Type,
- auth.Username,
- auth.Password,
- )
- if err != nil {
- return nil, err
- }
- opts = append(opts, opt)
- }
-
- // The default is the 1.1 version, this is why we don't need to check it in here.
- if cfg.Storage.OCI.ManifestVersion == config.OCIManifestVersion10 {
- opts = append(opts, oci.WithManifestVersion(oras.PackManifestVersion1_0))
- }
-
- ocistore, err := oci.NewStore(logger, cfg.Storage.OCI.BundlesDirectory, opts...)
- if err != nil {
- return nil, err
- }
-
- ref, err := oci.ParseReference(cfg.Storage.OCI.Repository)
- if err != nil {
- return nil, err
- }
-
- snapStore, err := storageoci.NewSnapshotStore(ctx, logger, ocistore, ref,
- storageoci.WithPollOptions(
- storagefs.WithInterval(cfg.Storage.OCI.PollInterval),
- ),
- )
- if err != nil {
- return nil, err
- }
-
- return storagefs.NewStore(storagefs.NewSingleReferenceStore(logger, snapStore)), nil
}
return nil, fmt.Errorf("unexpected storage type: %q", cfg.Storage.Type)
diff --git a/internal/storage/oplock/sql/sql.go b/internal/storage/oplock/sql/sql.go
deleted file mode 100644
index b03fde7d84..0000000000
--- a/internal/storage/oplock/sql/sql.go
+++ /dev/null
@@ -1,164 +0,0 @@
-package memory
-
-import (
- "context"
- "fmt"
- "time"
-
- sq "github.com/Masterminds/squirrel"
- "go.flipt.io/flipt/errors"
- "go.flipt.io/flipt/internal/storage/oplock"
- storagesql "go.flipt.io/flipt/internal/storage/sql"
- "go.uber.org/zap"
-)
-
-// Service is an in-memory implementation of the oplock.Service.
-// It is only safe for single instance / in-process use.
-type Service struct {
- logger *zap.Logger
- driver storagesql.Driver
- builder sq.StatementBuilderType
-}
-
-// New constructs and configures a new service instance.
-func New(logger *zap.Logger, driver storagesql.Driver, builder sq.StatementBuilderType) *Service {
- return &Service{
- logger: logger,
- driver: driver,
- builder: builder,
- }
-}
-
-// TryAcquire will attempt to obtain a lock for the supplied operation name for the specified duration.
-// If it succeeds then the returned boolean (acquired) will be true, else false.
-// The lock entry associated with the last successful acquisition is also returned.
-// Given the lock was acquired successfully this will be the entry just created.
-func (s *Service) TryAcquire(ctx context.Context, operation oplock.Operation, duration time.Duration) (acquired bool, entry oplock.LockEntry, err error) {
- entry, err = s.readEntry(ctx, operation)
- if err != nil {
- if _, match := errors.As[errors.ErrNotFound](err); match {
- // entry does not exist so we try and create one
- entry, err := s.insertEntry(ctx, operation, duration)
- if err != nil {
- if _, match := errors.As[errors.ErrInvalid](err); match {
- // check if the entry is invalid due to
- // uniqueness constraint violation
- // if so re-read the current entry and return that
- entry, err := s.readEntry(ctx, operation)
- return false, entry, err
- }
-
- return false, entry, err
- }
-
- return true, entry, nil
- }
-
- // something went wrong
- return false, entry, err
- }
-
- // entry exists so first check the acquired until has elapsed
- if time.Now().UTC().Before(entry.AcquiredUntil) {
- // return early as the lock is still acquired
- return false, entry, nil
- }
-
- acquired, err = s.acquireEntry(ctx, &entry, duration)
-
- return acquired, entry, err
-}
-
-func (s *Service) acquireEntry(ctx context.Context, entry *oplock.LockEntry, dur time.Duration) (acquired bool, err error) {
- defer func() {
- if err != nil {
- err = fmt.Errorf("updating existing entry: %w", s.driver.AdaptError(err))
- }
- }()
-
- now := time.Now().UTC()
- query := s.builder.Update("operation_lock").
- Set("version", entry.Version+1).
- Set("last_acquired_at", now).
- Set("acquired_until", now.Add(dur)).
- Where(sq.Eq{
- "operation": string(entry.Operation),
- // ensure current entry has not been updated
- "version": entry.Version,
- })
-
- res, err := query.ExecContext(ctx)
- if err != nil {
- return false, err
- }
-
- count, err := res.RowsAffected()
- if err != nil {
- return false, err
- }
-
- if count < 1 {
- // current entry version does not match
- // therefore we can assume it was updated
- // by concurrent lock acquirer
- return false, nil
- }
-
- entry.Version++
- entry.LastAcquired = now
- entry.AcquiredUntil = now.Add(dur)
- return true, nil
-}
-
-func (s *Service) insertEntry(ctx context.Context, op oplock.Operation, dur time.Duration) (entry oplock.LockEntry, err error) {
- defer func() {
- if err != nil {
- err = fmt.Errorf("inserting new entry: %w", err)
- }
- }()
-
- entry.Operation = op
- entry.Version = 1
- entry.LastAcquired = time.Now().UTC()
- entry.AcquiredUntil = entry.LastAcquired.Add(dur)
-
- _, err = s.builder.Insert("operation_lock").
- Columns(
- "operation",
- "version",
- "last_acquired_at",
- "acquired_until",
- ).Values(
- &entry.Operation,
- &entry.Version,
- &entry.LastAcquired,
- &entry.AcquiredUntil,
- ).ExecContext(ctx)
-
- return entry, s.driver.AdaptError(err)
-}
-
-func (s *Service) readEntry(ctx context.Context, operation oplock.Operation) (entry oplock.LockEntry, err error) {
- defer func() {
- if err != nil {
- err = fmt.Errorf("reading entry: %w", err)
- }
- }()
-
- err = s.builder.Select(
- "operation",
- "version",
- "last_acquired_at",
- "acquired_until",
- ).From("operation_lock").
- Where(sq.Eq{"operation": string(operation)}).
- QueryRowContext(ctx).
- Scan(
- &entry.Operation,
- &entry.Version,
- &entry.LastAcquired,
- &entry.AcquiredUntil,
- )
-
- return entry, s.driver.AdaptError(err)
-}
diff --git a/internal/storage/oplock/sql/sql_test.go b/internal/storage/oplock/sql/sql_test.go
deleted file mode 100644
index cf1d24bd0e..0000000000
--- a/internal/storage/oplock/sql/sql_test.go
+++ /dev/null
@@ -1,26 +0,0 @@
-package memory
-
-import (
- "testing"
-
- oplocktesting "go.flipt.io/flipt/internal/storage/oplock/testing"
- storagesql "go.flipt.io/flipt/internal/storage/sql"
- sqltesting "go.flipt.io/flipt/internal/storage/sql/testing"
- "go.uber.org/zap/zaptest"
-)
-
-func Test_Harness(t *testing.T) {
- logger := zaptest.NewLogger(t)
- db, err := sqltesting.Open()
- if err != nil {
- t.Fatal(err)
- }
-
- oplocktesting.Harness(
- t,
- New(
- logger,
- db.Driver,
- storagesql.BuilderFor(db.DB, db.Driver, true),
- ))
-}
diff --git a/internal/storage/sql/adapted_driver.go b/internal/storage/sql/adapted_driver.go
deleted file mode 100644
index 7785be2e24..0000000000
--- a/internal/storage/sql/adapted_driver.go
+++ /dev/null
@@ -1,64 +0,0 @@
-package sql
-
-import (
- "context"
- "database/sql/driver"
- "time"
-
- pgx "github.com/jackc/pgx/v5/stdlib"
-)
-
-const adaptedDriverOpenTimeout = 60 * time.Second
-
-// This is the wrapper around sql driver. By default, pgx driver returns connection
-// error with the host, username and password. `adaptedDriver` and `postgresConnector`
-// allow to customize errors and preventing leakage of the credentials to outside.
-func newAdaptedPostgresDriver(d Driver) driver.Driver {
- return &adaptedDriver{origin: &pgx.Driver{}, adapter: d}
-}
-
-var _ driver.Driver = (*adaptedDriver)(nil)
-var _ driver.DriverContext = (*adaptedDriver)(nil)
-
-type adaptedDriver struct {
- adapter Driver
- origin driver.DriverContext
-}
-
-func (d *adaptedDriver) Open(name string) (driver.Conn, error) {
- connector, err := d.OpenConnector(name)
- if err != nil {
- return nil, d.adapter.AdaptError(err)
- }
- ctx, cancel := context.WithTimeout(context.Background(), adaptedDriverOpenTimeout)
- defer cancel()
- return connector.Connect(ctx)
-}
-
-func (d *adaptedDriver) OpenConnector(name string) (driver.Connector, error) {
- connector, err := d.origin.OpenConnector(name)
- if err != nil {
- return nil, d.adapter.AdaptError(err)
- }
- return &adaptedConnector{origin: connector, driver: d, adapter: d.adapter}, nil
-}
-
-var _ driver.Connector = (*adaptedConnector)(nil)
-
-type adaptedConnector struct {
- origin driver.Connector
- driver driver.Driver
- adapter Driver
-}
-
-func (c *adaptedConnector) Driver() driver.Driver {
- return c.driver
-}
-
-func (c *adaptedConnector) Connect(ctx context.Context) (driver.Conn, error) {
- conn, err := c.origin.Connect(ctx)
- if err != nil {
- return nil, c.adapter.AdaptError(err)
- }
- return conn, nil
-}
diff --git a/internal/storage/sql/adapted_driver_test.go b/internal/storage/sql/adapted_driver_test.go
deleted file mode 100644
index 2b29a25705..0000000000
--- a/internal/storage/sql/adapted_driver_test.go
+++ /dev/null
@@ -1,61 +0,0 @@
-package sql
-
-import (
- "context"
-
- "testing"
-
- "github.com/jackc/pgx/v5/pgconn"
- "github.com/stretchr/testify/mock"
- "github.com/stretchr/testify/require"
-)
-
-func TestAdaptedDriver(t *testing.T) {
- mockDriver := NewMockDriverContext(t)
- t.Run("failure", func(t *testing.T) {
- name := "pgx://failure"
- mockDriver.On("OpenConnector", name).Return(nil, &pgconn.PgError{})
- d := &adaptedDriver{origin: mockDriver, adapter: Postgres}
- _, err := d.Open(name)
- require.Error(t, err)
- })
- t.Run("success", func(t *testing.T) {
- o := newMockConnector(t)
- var mockConn = &mockDriverConn{}
- o.On("Connect", mock.Anything).Once().Return(mockConn, nil)
- name := "pgx://success"
- mockDriver.On("OpenConnector", name).Return(o, nil)
- d := &adaptedDriver{origin: mockDriver, adapter: Postgres}
- conn, err := d.Open(name)
- require.NoError(t, err)
- require.Equal(t, mockConn, conn)
- })
-}
-
-func TestAdaptedConnectorConnect(t *testing.T) {
- o := newMockConnector(t)
- d := &adaptedDriver{}
- c := &adaptedConnector{
- origin: o,
- adapter: Postgres,
- driver: d,
- }
- require.Equal(t, d, c.Driver())
- t.Run("failure", func(t *testing.T) {
- var mockConn *mockDriverConn
- ctx := context.Background()
- o.On("Connect", ctx).Once().Return(mockConn, &pgconn.ConnectError{})
- _, err := c.Connect(ctx)
- require.Error(t, err)
- require.Equal(t, err, errConnectionFailed)
- })
-
- t.Run("success", func(t *testing.T) {
- var mockConn = &mockDriverConn{}
- ctx := context.Background()
- o.On("Connect", ctx).Once().Return(mockConn, nil)
- conn, err := c.Connect(ctx)
- require.NoError(t, err)
- require.Equal(t, mockConn, conn)
- })
-}
diff --git a/internal/storage/sql/common/evaluation.go b/internal/storage/sql/common/evaluation.go
deleted file mode 100644
index a454f3cb4a..0000000000
--- a/internal/storage/sql/common/evaluation.go
+++ /dev/null
@@ -1,415 +0,0 @@
-package common
-
-import (
- "context"
- "database/sql"
- "sort"
-
- sq "github.com/Masterminds/squirrel"
- "go.flipt.io/flipt/internal/storage"
- flipt "go.flipt.io/flipt/rpc/flipt"
-)
-
-func (s *Store) GetEvaluationRules(ctx context.Context, flag storage.ResourceRequest) (_ []*storage.EvaluationRule, err error) {
- ruleMetaRows, err := s.builder.
- Select("id, \"rank\", segment_operator").
- From("rules").
- Where(sq.Eq{"flag_key": flag.Key, "namespace_key": flag.Namespace()}).
- QueryContext(ctx)
- if err != nil {
- return nil, err
- }
-
- defer func() {
- if cerr := ruleMetaRows.Close(); cerr != nil && err == nil {
- err = cerr
- }
- }()
-
- type RuleMeta struct {
- ID string
- Rank int32
- SegmentOperator flipt.SegmentOperator
- }
-
- rmMap := make(map[string]*RuleMeta)
-
- ruleIDs := make([]string, 0)
- for ruleMetaRows.Next() {
- var rm RuleMeta
-
- if err := ruleMetaRows.Scan(&rm.ID, &rm.Rank, &rm.SegmentOperator); err != nil {
- return nil, err
- }
-
- rmMap[rm.ID] = &rm
- ruleIDs = append(ruleIDs, rm.ID)
- }
-
- if err := ruleMetaRows.Err(); err != nil {
- return nil, err
- }
-
- if err := ruleMetaRows.Close(); err != nil {
- return nil, err
- }
-
- rows, err := s.builder.Select(`
- rs.rule_id,
- rs.segment_key,
- s.match_type AS segment_match_type,
- c.id AS constraint_id,
- c."type" AS constraint_type,
- c.property AS constraint_property,
- c.operator AS constraint_operator,
- c.value AS constraint_value
- `).
- From("rule_segments AS rs").
- Join(`segments AS s ON (rs.segment_key = s."key" AND rs.namespace_key = s.namespace_key)`).
- LeftJoin(`constraints AS c ON (s."key" = c.segment_key AND s.namespace_key = c.namespace_key)`).
- Where(sq.Eq{"rs.rule_id": ruleIDs}).
- QueryContext(ctx)
- if err != nil {
- return nil, err
- }
-
- defer func() {
- if cerr := rows.Close(); cerr != nil && err == nil {
- err = cerr
- }
- }()
-
- var (
- uniqueRules = make(map[string]*storage.EvaluationRule)
- rules = []*storage.EvaluationRule{}
- )
-
- for rows.Next() {
- var (
- intermediateStorageRule struct {
- ID string
- NamespaceKey string
- FlagKey string
- SegmentKey string
- SegmentMatchType flipt.MatchType
- SegmentOperator flipt.SegmentOperator
- Rank int32
- }
- optionalConstraint optionalConstraint
- )
-
- if err := rows.Scan(
- &intermediateStorageRule.ID,
- &intermediateStorageRule.SegmentKey,
- &intermediateStorageRule.SegmentMatchType,
- &optionalConstraint.Id,
- &optionalConstraint.Type,
- &optionalConstraint.Property,
- &optionalConstraint.Operator,
- &optionalConstraint.Value); err != nil {
- return rules, err
- }
-
- rm := rmMap[intermediateStorageRule.ID]
-
- intermediateStorageRule.FlagKey = flag.Key
- intermediateStorageRule.NamespaceKey = flag.Namespace()
- intermediateStorageRule.Rank = rm.Rank
- intermediateStorageRule.SegmentOperator = rm.SegmentOperator
-
- if existingRule, ok := uniqueRules[intermediateStorageRule.ID]; ok {
- var constraint *storage.EvaluationConstraint
- if optionalConstraint.Id.Valid {
- constraint = &storage.EvaluationConstraint{
- ID: optionalConstraint.Id.String,
- Type: flipt.ComparisonType(optionalConstraint.Type.Int32),
- Property: optionalConstraint.Property.String,
- Operator: optionalConstraint.Operator.String,
- Value: optionalConstraint.Value.String,
- }
- }
-
- segment, ok := existingRule.Segments[intermediateStorageRule.SegmentKey]
- if !ok {
- ses := &storage.EvaluationSegment{
- SegmentKey: intermediateStorageRule.SegmentKey,
- MatchType: intermediateStorageRule.SegmentMatchType,
- }
-
- if constraint != nil {
- ses.Constraints = []storage.EvaluationConstraint{*constraint}
- }
-
- existingRule.Segments[intermediateStorageRule.SegmentKey] = ses
- } else if constraint != nil {
- segment.Constraints = append(segment.Constraints, *constraint)
- }
- } else {
- // haven't seen this rule before
- newRule := &storage.EvaluationRule{
- ID: intermediateStorageRule.ID,
- NamespaceKey: intermediateStorageRule.NamespaceKey,
- FlagKey: intermediateStorageRule.FlagKey,
- Rank: intermediateStorageRule.Rank,
- SegmentOperator: intermediateStorageRule.SegmentOperator,
- Segments: make(map[string]*storage.EvaluationSegment),
- }
-
- var constraint *storage.EvaluationConstraint
- if optionalConstraint.Id.Valid {
- constraint = &storage.EvaluationConstraint{
- ID: optionalConstraint.Id.String,
- Type: flipt.ComparisonType(optionalConstraint.Type.Int32),
- Property: optionalConstraint.Property.String,
- Operator: optionalConstraint.Operator.String,
- Value: optionalConstraint.Value.String,
- }
- }
-
- ses := &storage.EvaluationSegment{
- SegmentKey: intermediateStorageRule.SegmentKey,
- MatchType: intermediateStorageRule.SegmentMatchType,
- }
-
- if constraint != nil {
- ses.Constraints = []storage.EvaluationConstraint{*constraint}
- }
-
- newRule.Segments[intermediateStorageRule.SegmentKey] = ses
-
- uniqueRules[newRule.ID] = newRule
- rules = append(rules, newRule)
- }
- }
-
- sort.Slice(rules, func(i, j int) bool {
- return rules[i].Rank < rules[j].Rank
- })
-
- if err := rows.Err(); err != nil {
- return rules, err
- }
-
- if err := rows.Close(); err != nil {
- return rules, err
- }
-
- return rules, nil
-}
-
-func (s *Store) GetEvaluationDistributions(ctx context.Context, r storage.ResourceRequest, rule storage.IDRequest) (_ []*storage.EvaluationDistribution, err error) {
- rows, err := s.builder.Select("d.id, d.rule_id, d.variant_id, d.rollout, v.\"key\", v.attachment").
- From("distributions d").
- Join("variants v ON (d.variant_id = v.id)").
- Where(sq.Eq{"d.rule_id": rule.ID}).
- OrderBy("d.created_at ASC").
- QueryContext(ctx)
- if err != nil {
- return nil, err
- }
-
- defer func() {
- if cerr := rows.Close(); cerr != nil && err == nil {
- err = cerr
- }
- }()
-
- var distributions []*storage.EvaluationDistribution
-
- for rows.Next() {
- var (
- d storage.EvaluationDistribution
- attachment sql.NullString
- )
-
- if err := rows.Scan(
- &d.ID, &d.RuleID, &d.VariantID, &d.Rollout, &d.VariantKey, &attachment,
- ); err != nil {
- return distributions, err
- }
-
- if attachment.Valid {
- attachmentString, err := compactJSONString(attachment.String)
- if err != nil {
- return distributions, err
- }
- d.VariantAttachment = attachmentString
- }
-
- distributions = append(distributions, &d)
- }
-
- if err := rows.Err(); err != nil {
- return distributions, err
- }
-
- if err := rows.Close(); err != nil {
- return distributions, err
- }
-
- return distributions, nil
-}
-
-func (s *Store) GetEvaluationRollouts(ctx context.Context, flag storage.ResourceRequest) (_ []*storage.EvaluationRollout, err error) {
- rows, err := s.builder.Select(`
- r.id,
- r.namespace_key,
- r."type",
- r."rank",
- rt.percentage,
- rt.value,
- rss.segment_key,
- rss.rollout_segment_value,
- rss.segment_operator,
- rss.match_type,
- rss.constraint_type,
- rss.constraint_property,
- rss.constraint_operator,
- rss.constraint_value
- `).
- From("rollouts AS r").
- LeftJoin("rollout_thresholds AS rt ON (r.id = rt.rollout_id)").
- LeftJoin(`(
- SELECT
- rs.rollout_id,
- rsr.segment_key,
- s.match_type,
- rs.value AS rollout_segment_value,
- rs.segment_operator AS segment_operator,
- c."type" AS constraint_type,
- c.property AS constraint_property,
- c.operator AS constraint_operator,
- c.value AS constraint_value
- FROM rollout_segments AS rs
- JOIN rollout_segment_references AS rsr ON (rs.id = rsr.rollout_segment_id)
- JOIN segments AS s ON (rsr.segment_key = s."key" AND rsr.namespace_key = s.namespace_key)
- LEFT JOIN constraints AS c ON (rsr.segment_key = c.segment_key AND rsr.namespace_key = c.namespace_key)
- ) rss ON (r.id = rss.rollout_id)
- `).
- Where(sq.Eq{"r.namespace_key": flag.Namespace(), "r.flag_key": flag.Key}).
- OrderBy(`r."rank" ASC`).
- QueryContext(ctx)
- if err != nil {
- return nil, err
- }
-
- defer func() {
- if cerr := rows.Close(); cerr != nil && err == nil {
- err = cerr
- }
- }()
-
- var (
- uniqueSegmentedRollouts = make(map[string]*storage.EvaluationRollout)
- rollouts = []*storage.EvaluationRollout{}
- )
-
- for rows.Next() {
- var (
- rolloutId string
- evaluationRollout storage.EvaluationRollout
- rtPercentageNumber sql.NullFloat64
- rtPercentageValue sql.NullBool
- rsSegmentKey sql.NullString
- rsSegmentValue sql.NullBool
- rsSegmentOperator sql.NullInt32
- rsMatchType sql.NullInt32
- optionalConstraint optionalConstraint
- )
-
- if err := rows.Scan(
- &rolloutId,
- &evaluationRollout.NamespaceKey,
- &evaluationRollout.RolloutType,
- &evaluationRollout.Rank,
- &rtPercentageNumber,
- &rtPercentageValue,
- &rsSegmentKey,
- &rsSegmentValue,
- &rsSegmentOperator,
- &rsMatchType,
- &optionalConstraint.Type,
- &optionalConstraint.Property,
- &optionalConstraint.Operator,
- &optionalConstraint.Value,
- ); err != nil {
- return rollouts, err
- }
-
- if rtPercentageNumber.Valid && rtPercentageValue.Valid {
- storageThreshold := &storage.RolloutThreshold{
- Percentage: float32(rtPercentageNumber.Float64),
- Value: rtPercentageValue.Bool,
- }
-
- evaluationRollout.Threshold = storageThreshold
- } else if rsSegmentKey.Valid &&
- rsSegmentValue.Valid &&
- rsSegmentOperator.Valid &&
- rsMatchType.Valid {
-
- var c *storage.EvaluationConstraint
- if optionalConstraint.Type.Valid {
- c = &storage.EvaluationConstraint{
- Type: flipt.ComparisonType(optionalConstraint.Type.Int32),
- Property: optionalConstraint.Property.String,
- Operator: optionalConstraint.Operator.String,
- Value: optionalConstraint.Value.String,
- }
- }
-
- if existingRolloutSegment, ok := uniqueSegmentedRollouts[rolloutId]; ok {
- // check if segment exists and either append constraints to an already existing segment,
- // or add another segment to the map.
- es, innerOk := existingRolloutSegment.Segment.Segments[rsSegmentKey.String]
- if innerOk {
- if c != nil {
- es.Constraints = append(es.Constraints, *c)
- }
- } else {
-
- ses := &storage.EvaluationSegment{
- SegmentKey: rsSegmentKey.String,
- MatchType: flipt.MatchType(rsMatchType.Int32),
- }
-
- if c != nil {
- ses.Constraints = []storage.EvaluationConstraint{*c}
- }
-
- existingRolloutSegment.Segment.Segments[rsSegmentKey.String] = ses
- }
-
- continue
- }
-
- storageSegment := &storage.RolloutSegment{
- Value: rsSegmentValue.Bool,
- SegmentOperator: flipt.SegmentOperator(rsSegmentOperator.Int32),
- Segments: make(map[string]*storage.EvaluationSegment),
- }
-
- ses := &storage.EvaluationSegment{
- SegmentKey: rsSegmentKey.String,
- MatchType: flipt.MatchType(rsMatchType.Int32),
- }
-
- if c != nil {
- ses.Constraints = []storage.EvaluationConstraint{*c}
- }
-
- storageSegment.Segments[rsSegmentKey.String] = ses
-
- evaluationRollout.Segment = storageSegment
- uniqueSegmentedRollouts[rolloutId] = &evaluationRollout
- }
-
- rollouts = append(rollouts, &evaluationRollout)
- }
-
- if err := rows.Err(); err != nil {
- return rollouts, err
- }
-
- return rollouts, nil
-}
diff --git a/internal/storage/sql/common/flag.go b/internal/storage/sql/common/flag.go
deleted file mode 100644
index c7d76d1f0c..0000000000
--- a/internal/storage/sql/common/flag.go
+++ /dev/null
@@ -1,620 +0,0 @@
-package common
-
-import (
- "bytes"
- "context"
- "database/sql"
- "encoding/base64"
- "encoding/json"
- "errors"
- "fmt"
-
- sq "github.com/Masterminds/squirrel"
- "github.com/google/uuid"
- "google.golang.org/protobuf/types/known/structpb"
-
- errs "go.flipt.io/flipt/errors"
- "go.flipt.io/flipt/internal/storage"
- fliptsql "go.flipt.io/flipt/internal/storage/sql"
- flipt "go.flipt.io/flipt/rpc/flipt"
-)
-
-func compactJSONString(jsonString string) (string, error) {
- var buf bytes.Buffer
- if err := json.Compact(&buf, []byte(jsonString)); err != nil {
- return "", err
- }
- return buf.String(), nil
-}
-
-func emptyAsNil(str string) *string {
- if str == "" {
- return nil
- }
- return &str
-}
-
-// GetFlag gets a flag with variants by key
-func (s *Store) GetFlag(ctx context.Context, p storage.ResourceRequest) (*flipt.Flag, error) {
- var (
- createdAt fliptsql.Timestamp
- updatedAt fliptsql.Timestamp
- defaultVariantId sql.NullString
- metadata fliptsql.JSONField[map[string]any]
- flag = &flipt.Flag{}
- err = s.builder.Select("namespace_key, \"key\", \"type\", name, description, enabled, created_at, updated_at, default_variant_id", "metadata").
- From("flags").
- Where(sq.Eq{"namespace_key": p.Namespace(), "\"key\"": p.Key}).
- QueryRowContext(ctx).
- Scan(
- &flag.NamespaceKey,
- &flag.Key,
- &flag.Type,
- &flag.Name,
- &flag.Description,
- &flag.Enabled,
- &createdAt,
- &updatedAt,
- &defaultVariantId,
- &metadata)
- )
-
- if err != nil {
- if errors.Is(err, sql.ErrNoRows) {
- return nil, errs.ErrNotFoundf("flag %q", p)
- }
-
- return nil, err
- }
-
- flag.CreatedAt = createdAt.Timestamp
- flag.UpdatedAt = updatedAt.Timestamp
- if metadata.T != nil {
- flag.Metadata, _ = structpb.NewStruct(metadata.T)
- }
-
- query := s.builder.Select("id, namespace_key, flag_key, \"key\", name, description, attachment, created_at, updated_at").
- From("variants").
- Where(sq.Eq{"namespace_key": flag.NamespaceKey, "flag_key": flag.Key}).
- OrderBy("created_at ASC")
-
- rows, err := query.QueryContext(ctx)
- if err != nil {
- return flag, err
- }
-
- defer func() {
- if cerr := rows.Close(); cerr != nil && err == nil {
- err = cerr
- }
- }()
-
- for rows.Next() {
- var (
- variant flipt.Variant
- createdAt, updatedAt fliptsql.Timestamp
- attachment sql.NullString
- )
-
- if err := rows.Scan(
- &variant.Id,
- &variant.NamespaceKey,
- &variant.FlagKey,
- &variant.Key,
- &variant.Name,
- &variant.Description,
- &attachment,
- &createdAt,
- &updatedAt); err != nil {
- return flag, err
- }
-
- variant.CreatedAt = createdAt.Timestamp
- variant.UpdatedAt = updatedAt.Timestamp
-
- if attachment.Valid {
- compactedAttachment, err := compactJSONString(attachment.String)
- if err != nil {
- return flag, err
- }
- variant.Attachment = compactedAttachment
- }
-
- if defaultVariantId.Valid && variant.Id == defaultVariantId.String {
- flag.DefaultVariant = &variant
- }
-
- flag.Variants = append(flag.Variants, &variant)
- }
-
- return flag, rows.Err()
-}
-
-type optionalVariant struct {
- Id sql.NullString
- NamespaceKey sql.NullString
- Key sql.NullString
- FlagKey sql.NullString
- Name sql.NullString
- Description sql.NullString
- Attachment sql.NullString
- CreatedAt fliptsql.NullableTimestamp
- UpdatedAt fliptsql.NullableTimestamp
-}
-
-type flagWithDefaultVariant struct {
- *flipt.Flag
- DefaultVariantId sql.NullString
-}
-
-// ListFlags lists all flags with variants
-func (s *Store) ListFlags(ctx context.Context, req *storage.ListRequest[storage.NamespaceRequest]) (storage.ResultSet[*flipt.Flag], error) {
- var (
- flags []*flipt.Flag
- results = storage.ResultSet[*flipt.Flag]{}
-
- query = s.builder.Select("namespace_key, \"key\", \"type\", name, description, enabled, created_at, updated_at, default_variant_id, metadata").
- From("flags").
- Where(sq.Eq{"namespace_key": req.Predicate.Namespace()}).
- OrderBy(fmt.Sprintf("created_at %s", req.QueryParams.Order))
- )
-
- if req.QueryParams.Limit > 0 {
- query = query.Limit(req.QueryParams.Limit + 1)
- }
-
- var offset uint64
-
- if req.QueryParams.PageToken != "" {
- token, err := decodePageToken(s.logger, req.QueryParams.PageToken)
- if err != nil {
- return results, err
- }
-
- offset = token.Offset
- query = query.Offset(offset)
- } else if req.QueryParams.Offset > 0 {
- offset = req.QueryParams.Offset
- query = query.Offset(offset)
- }
-
- rows, err := query.QueryContext(ctx)
- if err != nil {
- return results, err
- }
-
- defer func() {
- if cerr := rows.Close(); cerr != nil && err == nil {
- err = cerr
- }
- }()
-
- // keep track of flags so we can associated variants in second query.
- flagsByKey := make(map[string]*flagWithDefaultVariant)
- for rows.Next() {
- var (
- flag = &flipt.Flag{}
-
- fCreatedAt fliptsql.Timestamp
- fUpdatedAt fliptsql.Timestamp
- fDefaultVariantId sql.NullString
- fMetadata fliptsql.JSONField[map[string]any]
- )
-
- if err := rows.Scan(
- &flag.NamespaceKey,
- &flag.Key,
- &flag.Type,
- &flag.Name,
- &flag.Description,
- &flag.Enabled,
- &fCreatedAt,
- &fUpdatedAt,
- &fDefaultVariantId,
- &fMetadata,
- ); err != nil {
- return results, err
- }
-
- flag.CreatedAt = fCreatedAt.Timestamp
- flag.UpdatedAt = fUpdatedAt.Timestamp
- flag.Metadata, _ = structpb.NewStruct(fMetadata.T)
-
- flags = append(flags, flag)
- flagsByKey[flag.Key] = &flagWithDefaultVariant{Flag: flag, DefaultVariantId: fDefaultVariantId}
- }
-
- if err := rows.Err(); err != nil {
- return results, err
- }
-
- if err := rows.Close(); err != nil {
- return results, err
- }
-
- if err := s.setVariants(ctx, req.Predicate.Namespace(), flagsByKey); err != nil {
- return results, err
- }
-
- var next *flipt.Flag
-
- if len(flags) > int(req.QueryParams.Limit) && req.QueryParams.Limit > 0 {
- next = flags[len(flags)-1]
- flags = flags[:req.QueryParams.Limit]
- }
-
- results.Results = flags
-
- if next != nil {
- out, err := json.Marshal(PageToken{Key: next.Key, Offset: offset + uint64(len(flags))})
- if err != nil {
- return results, fmt.Errorf("encoding page token %w", err)
- }
- results.NextPageToken = base64.StdEncoding.EncodeToString(out)
- }
-
- return results, nil
-}
-
-func (s *Store) setVariants(ctx context.Context, namespaceKey string, flagsByKey map[string]*flagWithDefaultVariant) error {
- allFlagKeys := make([]string, 0, len(flagsByKey))
- for k := range flagsByKey {
- allFlagKeys = append(allFlagKeys, k)
- }
-
- query := s.builder.Select("id, namespace_key, \"key\", flag_key, name, description, attachment, created_at, updated_at").
- From("variants").
- Where(sq.Eq{"namespace_key": namespaceKey, "flag_key": allFlagKeys}).
- OrderBy("created_at")
-
- rows, err := query.QueryContext(ctx)
- if err != nil {
- return err
- }
-
- defer func() {
- if cerr := rows.Close(); cerr != nil && err == nil {
- err = cerr
- }
- }()
-
- for rows.Next() {
- var (
- variant optionalVariant
- vCreatedAt fliptsql.NullableTimestamp
- vUpdatedAt fliptsql.NullableTimestamp
- )
-
- if err := rows.Scan(
- &variant.Id,
- &variant.NamespaceKey,
- &variant.Key,
- &variant.FlagKey,
- &variant.Name,
- &variant.Description,
- &variant.Attachment,
- &vCreatedAt,
- &vUpdatedAt); err != nil {
- return err
- }
-
- if flag, ok := flagsByKey[variant.FlagKey.String]; ok {
- v := &flipt.Variant{
- Id: variant.Id.String,
- NamespaceKey: variant.NamespaceKey.String,
- Key: variant.Key.String,
- FlagKey: variant.FlagKey.String,
- Name: variant.Name.String,
- Description: variant.Description.String,
- Attachment: variant.Attachment.String,
- CreatedAt: vCreatedAt.Timestamp,
- UpdatedAt: vUpdatedAt.Timestamp,
- }
-
- flag.Variants = append(flag.Variants, v)
-
- if flag.DefaultVariantId.Valid && variant.Id.String == flag.DefaultVariantId.String {
- flag.DefaultVariant = v
- }
- }
- }
-
- if err := rows.Err(); err != nil {
- return err
- }
-
- return rows.Close()
-}
-
-// CountFlags counts all flags
-func (s *Store) CountFlags(ctx context.Context, p storage.NamespaceRequest) (uint64, error) {
- var count uint64
-
- if err := s.builder.Select("COUNT(*)").
- From("flags").
- Where(sq.Eq{"namespace_key": p.Namespace()}).
- QueryRowContext(ctx).
- Scan(&count); err != nil {
- return 0, err
- }
-
- return count, nil
-}
-
-// CreateFlag creates a flag
-func (s *Store) CreateFlag(ctx context.Context, r *flipt.CreateFlagRequest) (_ *flipt.Flag, err error) {
- defer func() {
- if err == nil {
- err = s.setVersion(ctx, r.NamespaceKey)
- }
- }()
-
- if r.NamespaceKey == "" {
- r.NamespaceKey = storage.DefaultNamespace
- }
-
- var (
- now = flipt.Now()
- flag = &flipt.Flag{
- NamespaceKey: r.NamespaceKey,
- Key: r.Key,
- Type: r.Type,
- Name: r.Name,
- Description: r.Description,
- Enabled: r.Enabled,
- Metadata: r.Metadata,
- CreatedAt: now,
- UpdatedAt: now,
- }
- metadata any
- )
-
- if flag.Metadata != nil && len(flag.Metadata.Fields) > 0 {
- metadata = &fliptsql.JSONField[map[string]any]{T: flag.Metadata.AsMap()}
- }
-
- if _, err := s.builder.Insert("flags").
- Columns("namespace_key", "\"key\"", "\"type\"", "name", "description", "enabled", "metadata", "created_at", "updated_at").
- Values(
- flag.NamespaceKey,
- flag.Key,
- int32(flag.Type),
- flag.Name,
- flag.Description,
- flag.Enabled,
- metadata,
- &fliptsql.Timestamp{Timestamp: flag.CreatedAt},
- &fliptsql.Timestamp{Timestamp: flag.UpdatedAt},
- ).
- ExecContext(ctx); err != nil {
- return nil, err
- }
-
- return flag, nil
-}
-
-// UpdateFlag updates an existing flag
-func (s *Store) UpdateFlag(ctx context.Context, r *flipt.UpdateFlagRequest) (_ *flipt.Flag, err error) {
- defer func() {
- if err == nil {
- err = s.setVersion(ctx, r.NamespaceKey)
- }
- }()
-
- if r.NamespaceKey == "" {
- r.NamespaceKey = storage.DefaultNamespace
- }
-
- // if default variant is set, check that the variant and flag exist in the same namespace
- if r.DefaultVariantId != "" {
- var count uint64
-
- if err := s.builder.Select("COUNT(id)").
- From("variants").
- Where(sq.Eq{"id": r.DefaultVariantId, "namespace_key": r.NamespaceKey, "flag_key": r.Key}).
- QueryRowContext(ctx).
- Scan(&count); err != nil {
- return nil, err
- }
-
- if count != 1 {
- return nil, errs.ErrInvalidf(`variant %q not found for flag "%s/%s"`, r.DefaultVariantId, r.NamespaceKey, r.Key)
- }
- }
-
- query := s.builder.Update("flags").
- Set("name", r.Name).
- Set("description", r.Description).
- Set("enabled", r.Enabled).
- Set("updated_at", &fliptsql.Timestamp{Timestamp: flipt.Now()})
-
- if r.Metadata != nil {
- if len(r.Metadata.Fields) > 0 {
- query = query.Set("metadata", &fliptsql.JSONField[map[string]any]{T: r.Metadata.AsMap()})
- } else {
- query = query.Set("metadata", nil)
- }
- }
-
- if r.DefaultVariantId != "" {
- query = query.Set("default_variant_id", r.DefaultVariantId)
- } else {
- query = query.Set("default_variant_id", nil)
- }
-
- query = query.
- Where(sq.And{sq.Eq{"namespace_key": r.NamespaceKey}, sq.Eq{"\"key\"": r.Key}})
-
- res, err := query.ExecContext(ctx)
- if err != nil {
- return nil, err
- }
-
- count, err := res.RowsAffected()
- if err != nil {
- return nil, err
- }
-
- p := storage.NewResource(r.NamespaceKey, r.Key)
-
- if count != 1 {
- return nil, errs.ErrNotFoundf("flag %q", p)
- }
-
- return s.GetFlag(ctx, p)
-}
-
-// DeleteFlag deletes a flag
-func (s *Store) DeleteFlag(ctx context.Context, r *flipt.DeleteFlagRequest) error {
- defer func() {
- _ = s.setVersion(ctx, r.NamespaceKey)
- }()
-
- if r.NamespaceKey == "" {
- r.NamespaceKey = storage.DefaultNamespace
- }
-
- _, err := s.builder.Delete("flags").
- Where(sq.And{sq.Eq{"namespace_key": r.NamespaceKey}, sq.Eq{"\"key\"": r.Key}}).
- ExecContext(ctx)
-
- return err
-}
-
-// CreateVariant creates a variant
-func (s *Store) CreateVariant(ctx context.Context, r *flipt.CreateVariantRequest) (*flipt.Variant, error) {
- defer func() {
- _ = s.setVersion(ctx, r.NamespaceKey)
- }()
-
- if r.NamespaceKey == "" {
- r.NamespaceKey = storage.DefaultNamespace
- }
-
- var (
- now = flipt.Now()
- v = &flipt.Variant{
- Id: uuid.NewString(),
- NamespaceKey: r.NamespaceKey,
- FlagKey: r.FlagKey,
- Key: r.Key,
- Name: r.Name,
- Description: r.Description,
- Attachment: r.Attachment,
- CreatedAt: now,
- UpdatedAt: now,
- }
- )
-
- attachment := emptyAsNil(r.Attachment)
- if _, err := s.builder.Insert("variants").
- Columns("id", "namespace_key", "flag_key", "\"key\"", "name", "description", "attachment", "created_at", "updated_at").
- Values(
- v.Id,
- v.NamespaceKey,
- v.FlagKey,
- v.Key,
- v.Name,
- v.Description,
- attachment,
- &fliptsql.Timestamp{Timestamp: v.CreatedAt},
- &fliptsql.Timestamp{Timestamp: v.UpdatedAt},
- ).
- ExecContext(ctx); err != nil {
- return nil, err
- }
-
- if attachment != nil {
- compactedAttachment, err := compactJSONString(*attachment)
- if err != nil {
- return nil, err
- }
- v.Attachment = compactedAttachment
- }
-
- return v, nil
-}
-
-// UpdateVariant updates an existing variant
-func (s *Store) UpdateVariant(ctx context.Context, r *flipt.UpdateVariantRequest) (_ *flipt.Variant, err error) {
- defer func() {
- if err == nil {
- err = s.setVersion(ctx, r.NamespaceKey)
- }
- }()
-
- if r.NamespaceKey == "" {
- r.NamespaceKey = storage.DefaultNamespace
- }
-
- whereClause := sq.And{sq.Eq{"id": r.Id}, sq.Eq{"flag_key": r.FlagKey}, sq.Eq{"namespace_key": r.NamespaceKey}}
-
- query := s.builder.Update("variants").
- Set("\"key\"", r.Key).
- Set("name", r.Name).
- Set("description", r.Description).
- Set("attachment", emptyAsNil(r.Attachment)).
- Set("updated_at", &fliptsql.Timestamp{Timestamp: flipt.Now()}).
- Where(whereClause)
-
- res, err := query.ExecContext(ctx)
- if err != nil {
- return nil, err
- }
-
- count, err := res.RowsAffected()
- if err != nil {
- return nil, err
- }
-
- if count != 1 {
- return nil, errs.ErrNotFoundf("variant %q", r.Key)
- }
-
- var (
- attachment sql.NullString
- createdAt fliptsql.Timestamp
- updatedAt fliptsql.Timestamp
-
- v = &flipt.Variant{}
- )
-
- if err := s.builder.Select("id, namespace_key, \"key\", flag_key, name, description, attachment, created_at, updated_at").
- From("variants").
- Where(whereClause).
- QueryRowContext(ctx).
- Scan(&v.Id, &v.NamespaceKey, &v.Key, &v.FlagKey, &v.Name, &v.Description, &attachment, &createdAt, &updatedAt); err != nil {
- return nil, err
- }
-
- v.CreatedAt = createdAt.Timestamp
- v.UpdatedAt = updatedAt.Timestamp
- if attachment.Valid {
- compactedAttachment, err := compactJSONString(attachment.String)
- if err != nil {
- return nil, err
- }
- v.Attachment = compactedAttachment
- }
-
- return v, nil
-}
-
-// DeleteVariant deletes a variant
-func (s *Store) DeleteVariant(ctx context.Context, r *flipt.DeleteVariantRequest) (err error) {
- defer func() {
- if err == nil {
- err = s.setVersion(ctx, r.NamespaceKey)
- }
- }()
-
- if r.NamespaceKey == "" {
- r.NamespaceKey = storage.DefaultNamespace
- }
-
- _, err = s.builder.Delete("variants").
- Where(sq.And{sq.Eq{"id": r.Id}, sq.Eq{"flag_key": r.FlagKey}, sq.Eq{"namespace_key": r.NamespaceKey}}).
- ExecContext(ctx)
-
- return err
-}
diff --git a/internal/storage/sql/common/namespace.go b/internal/storage/sql/common/namespace.go
deleted file mode 100644
index a27246bb17..0000000000
--- a/internal/storage/sql/common/namespace.go
+++ /dev/null
@@ -1,231 +0,0 @@
-package common
-
-import (
- "context"
- "database/sql"
- "encoding/base64"
- "encoding/json"
- "errors"
- "fmt"
-
- sq "github.com/Masterminds/squirrel"
- errs "go.flipt.io/flipt/errors"
- "go.flipt.io/flipt/internal/storage"
- fliptsql "go.flipt.io/flipt/internal/storage/sql"
- flipt "go.flipt.io/flipt/rpc/flipt"
-)
-
-func (s *Store) GetNamespace(ctx context.Context, p storage.NamespaceRequest) (*flipt.Namespace, error) {
- var (
- createdAt fliptsql.Timestamp
- updatedAt fliptsql.Timestamp
-
- namespace = &flipt.Namespace{}
-
- err = s.builder.Select("\"key\", name, description, protected, created_at, updated_at").
- From("namespaces").
- Where(sq.Eq{"\"key\"": p.Namespace()}).
- QueryRowContext(ctx).
- Scan(
- &namespace.Key,
- &namespace.Name,
- &namespace.Description,
- &namespace.Protected,
- &createdAt,
- &updatedAt)
- )
-
- if err != nil {
- if errors.Is(err, sql.ErrNoRows) {
- return nil, errs.ErrNotFoundf("namespace %q", p)
- }
-
- return nil, err
- }
-
- namespace.CreatedAt = createdAt.Timestamp
- namespace.UpdatedAt = updatedAt.Timestamp
-
- return namespace, nil
-}
-
-func (s *Store) ListNamespaces(ctx context.Context, req *storage.ListRequest[storage.ReferenceRequest]) (storage.ResultSet[*flipt.Namespace], error) {
- var (
- namespaces []*flipt.Namespace
- results = storage.ResultSet[*flipt.Namespace]{}
-
- query = s.builder.Select("\"key\", name, description, protected, created_at, updated_at").
- From("namespaces").
- OrderBy(fmt.Sprintf("created_at %s", req.QueryParams.Order))
- )
-
- if req.QueryParams.Limit > 0 {
- query = query.Limit(req.QueryParams.Limit + 1)
- }
-
- var offset uint64
-
- if req.QueryParams.PageToken != "" {
- token, err := decodePageToken(s.logger, req.QueryParams.PageToken)
- if err != nil {
- return results, err
- }
-
- offset = token.Offset
- query = query.Offset(offset)
- } else if req.QueryParams.Offset > 0 {
- offset = req.QueryParams.Offset
- query = query.Offset(offset)
- }
-
- rows, err := query.QueryContext(ctx)
- if err != nil {
- return results, err
- }
-
- defer func() {
- if cerr := rows.Close(); cerr != nil && err == nil {
- err = cerr
- }
- }()
-
- for rows.Next() {
- var (
- namespace = &flipt.Namespace{}
-
- createdAt fliptsql.Timestamp
- updatedAt fliptsql.Timestamp
- )
-
- if err := rows.Scan(
- &namespace.Key,
- &namespace.Name,
- &namespace.Description,
- &namespace.Protected,
- &createdAt,
- &updatedAt,
- ); err != nil {
- return results, err
- }
-
- namespace.CreatedAt = createdAt.Timestamp
- namespace.UpdatedAt = updatedAt.Timestamp
-
- namespaces = append(namespaces, namespace)
- }
-
- if err := rows.Err(); err != nil {
- return results, err
- }
-
- if err := rows.Close(); err != nil {
- return results, err
- }
-
- var next *flipt.Namespace
-
- if len(namespaces) > int(req.QueryParams.Limit) && req.QueryParams.Limit > 0 {
- next = namespaces[len(namespaces)-1]
- namespaces = namespaces[:req.QueryParams.Limit]
- }
-
- results.Results = namespaces
-
- if next != nil {
- out, err := json.Marshal(PageToken{Key: next.Key, Offset: offset + uint64(len(namespaces))})
- if err != nil {
- return results, fmt.Errorf("encoding page token %w", err)
- }
- results.NextPageToken = base64.StdEncoding.EncodeToString(out)
- }
-
- return results, nil
-}
-
-func (s *Store) CountNamespaces(ctx context.Context, _ storage.ReferenceRequest) (uint64, error) {
- var count uint64
-
- if err := s.builder.Select("COUNT(*)").
- From("namespaces").
- QueryRowContext(ctx).
- Scan(&count); err != nil {
- return 0, err
- }
-
- return count, nil
-}
-
-func (s *Store) CreateNamespace(ctx context.Context, r *flipt.CreateNamespaceRequest) (_ *flipt.Namespace, err error) {
- defer func() {
- if err == nil {
- err = s.setVersion(ctx, r.Key)
- }
- }()
-
- var (
- now = flipt.Now()
- namespace = &flipt.Namespace{
- Key: r.Key,
- Name: r.Name,
- Description: r.Description,
- CreatedAt: now,
- UpdatedAt: now,
- }
- )
-
- if _, err := s.builder.Insert("namespaces").
- Columns("\"key\"", "name", "description", "created_at", "updated_at").
- Values(
- namespace.Key,
- namespace.Name,
- namespace.Description,
- &fliptsql.Timestamp{Timestamp: namespace.CreatedAt},
- &fliptsql.Timestamp{Timestamp: namespace.UpdatedAt},
- ).
- ExecContext(ctx); err != nil {
- return nil, err
- }
-
- return namespace, nil
-}
-
-func (s *Store) UpdateNamespace(ctx context.Context, r *flipt.UpdateNamespaceRequest) (_ *flipt.Namespace, err error) {
- defer func() {
- if err == nil {
- err = s.setVersion(ctx, r.Key)
- }
- }()
-
- query := s.builder.Update("namespaces").
- Set("name", r.Name).
- Set("description", r.Description).
- Set("updated_at", &fliptsql.Timestamp{Timestamp: flipt.Now()}).
- Where(sq.Eq{"\"key\"": r.Key})
-
- res, err := query.ExecContext(ctx)
- if err != nil {
- return nil, err
- }
-
- count, err := res.RowsAffected()
- if err != nil {
- return nil, err
- }
-
- p := storage.NewNamespace(r.Key)
-
- if count != 1 {
- return nil, errs.ErrNotFoundf("namespace %q", p)
- }
-
- return s.GetNamespace(ctx, p)
-}
-
-func (s *Store) DeleteNamespace(ctx context.Context, r *flipt.DeleteNamespaceRequest) (err error) {
-
- _, err = s.builder.Delete("namespaces").
- Where(sq.Eq{"\"key\"": r.Key}).
- ExecContext(ctx)
-
- return err
-}
diff --git a/internal/storage/sql/common/rollout.go b/internal/storage/sql/common/rollout.go
deleted file mode 100644
index 07160aa665..0000000000
--- a/internal/storage/sql/common/rollout.go
+++ /dev/null
@@ -1,784 +0,0 @@
-package common
-
-import (
- "context"
- "database/sql"
- "encoding/base64"
- "encoding/json"
- "errors"
- "fmt"
-
- sq "github.com/Masterminds/squirrel"
- "github.com/google/uuid"
- errs "go.flipt.io/flipt/errors"
- "go.flipt.io/flipt/internal/storage"
- fliptsql "go.flipt.io/flipt/internal/storage/sql"
- "go.flipt.io/flipt/rpc/flipt"
-)
-
-const (
- tableRollouts = "rollouts"
- tableRolloutPercentages = "rollout_thresholds"
- tableRolloutSegments = "rollout_segments"
- tableRolloutSegmentReferences = "rollout_segment_references"
-)
-
-func (s *Store) GetRollout(ctx context.Context, ns storage.NamespaceRequest, id string) (*flipt.Rollout, error) {
- return getRollout(ctx, s.builder, ns, id)
-}
-
-func getRollout(ctx context.Context, builder sq.StatementBuilderType, ns storage.NamespaceRequest, id string) (*flipt.Rollout, error) {
- var (
- createdAt fliptsql.Timestamp
- updatedAt fliptsql.Timestamp
-
- rollout = &flipt.Rollout{}
-
- err = builder.Select("id, namespace_key, flag_key, \"type\", \"rank\", description, created_at, updated_at").
- From(tableRollouts).
- Where(sq.Eq{"id": id, "namespace_key": ns.Namespace()}).
- QueryRowContext(ctx).
- Scan(
- &rollout.Id,
- &rollout.NamespaceKey,
- &rollout.FlagKey,
- &rollout.Type,
- &rollout.Rank,
- &rollout.Description,
- &createdAt,
- &updatedAt)
- )
-
- if err != nil {
- if errors.Is(err, sql.ErrNoRows) {
- return nil, errs.ErrNotFoundf(`rollout "%s/%s"`, ns.Namespace(), id)
- }
-
- return nil, err
- }
-
- rollout.CreatedAt = createdAt.Timestamp
- rollout.UpdatedAt = updatedAt.Timestamp
-
- switch rollout.Type {
- case flipt.RolloutType_SEGMENT_ROLLOUT_TYPE:
- segmentRule := &flipt.Rollout_Segment{
- Segment: &flipt.RolloutSegment{},
- }
-
- var (
- value bool
- rolloutSegmentId string
- segmentOperator flipt.SegmentOperator
- )
- if err := builder.Select("id, \"value\", segment_operator").
- From(tableRolloutSegments).
- Where(sq.Eq{"rollout_id": rollout.Id}).
- Limit(1).
- QueryRowContext(ctx).
- Scan(&rolloutSegmentId, &value, &segmentOperator); err != nil {
- return nil, err
- }
-
- segmentRule.Segment.Value = value
- segmentRule.Segment.SegmentOperator = segmentOperator
-
- rows, err := builder.Select("segment_key").
- From(tableRolloutSegmentReferences).
- Where(sq.Eq{"rollout_segment_id": rolloutSegmentId, "namespace_key": rollout.NamespaceKey}).
- QueryContext(ctx)
- if err != nil {
- return nil, err
- }
-
- defer func() {
- if cerr := rows.Close(); cerr != nil && err == nil {
- err = cerr
- }
- }()
-
- segmentKeys := []string{}
-
- for rows.Next() {
- var segmentKey string
-
- if err := rows.Scan(&segmentKey); err != nil {
- return nil, err
- }
-
- segmentKeys = append(segmentKeys, segmentKey)
- }
-
- if err := rows.Err(); err != nil {
- return nil, err
- }
-
- if len(segmentKeys) == 1 {
- segmentRule.Segment.SegmentKey = segmentKeys[0]
- } else {
- segmentRule.Segment.SegmentKeys = segmentKeys
- }
-
- rollout.Rule = segmentRule
- case flipt.RolloutType_THRESHOLD_ROLLOUT_TYPE:
- thresholdRule := &flipt.Rollout_Threshold{
- Threshold: &flipt.RolloutThreshold{},
- }
-
- if err := builder.Select("percentage, \"value\"").
- From(tableRolloutPercentages).
- Where(sq.Eq{"rollout_id": rollout.Id, "namespace_key": rollout.NamespaceKey}).
- Limit(1).
- QueryRowContext(ctx).
- Scan(
- &thresholdRule.Threshold.Percentage,
- &thresholdRule.Threshold.Value); err != nil {
- return nil, err
- }
-
- rollout.Rule = thresholdRule
-
- default:
- return nil, fmt.Errorf("unknown rollout type %v", rollout.Type)
- }
-
- return rollout, nil
-}
-
-func (s *Store) ListRollouts(ctx context.Context, req *storage.ListRequest[storage.ResourceRequest]) (storage.ResultSet[*flipt.Rollout], error) {
- var (
- rollouts []*flipt.Rollout
- results = storage.ResultSet[*flipt.Rollout]{}
-
- query = s.builder.Select("id, namespace_key, flag_key, \"type\", \"rank\", description, created_at, updated_at").
- From(tableRollouts).
- Where(sq.Eq{"flag_key": req.Predicate.Key, "namespace_key": req.Predicate.Namespace()}).
- OrderBy(fmt.Sprintf("\"rank\" %s", req.QueryParams.Order))
- )
-
- if req.QueryParams.Limit > 0 {
- query = query.Limit(req.QueryParams.Limit + 1)
- }
-
- var offset uint64
-
- if req.QueryParams.PageToken != "" {
- token, err := decodePageToken(s.logger, req.QueryParams.PageToken)
- if err != nil {
- return results, err
- }
-
- offset = token.Offset
- query = query.Offset(offset)
- }
-
- rows, err := query.QueryContext(ctx)
- if err != nil {
- return results, err
- }
-
- defer func() {
- if cerr := rows.Close(); cerr != nil && err == nil {
- err = cerr
- }
- }()
-
- var (
- rolloutsById = map[string]*flipt.Rollout{}
- rolloutsByType = map[flipt.RolloutType][]*flipt.Rollout{}
- )
-
- for rows.Next() {
- var (
- rollout = &flipt.Rollout{}
- rCreatedAt fliptsql.Timestamp
- rUpdatedAt fliptsql.Timestamp
- )
-
- if err := rows.Scan(
- &rollout.Id,
- &rollout.NamespaceKey,
- &rollout.FlagKey,
- &rollout.Type,
- &rollout.Rank,
- &rollout.Description,
- &rCreatedAt,
- &rUpdatedAt); err != nil {
- return results, err
- }
-
- rollout.CreatedAt = rCreatedAt.Timestamp
- rollout.UpdatedAt = rUpdatedAt.Timestamp
-
- rollouts = append(rollouts, rollout)
- rolloutsById[rollout.Id] = rollout
- rolloutsByType[rollout.Type] = append(rolloutsByType[rollout.Type], rollout)
- }
-
- if err := rows.Err(); err != nil {
- return results, err
- }
-
- if err := rows.Close(); err != nil {
- return results, err
- }
-
- // get all rules from rollout_segment_rules table
- if len(rolloutsByType[flipt.RolloutType_SEGMENT_ROLLOUT_TYPE]) > 0 {
- allRuleIds := make([]string, 0, len(rolloutsByType[flipt.RolloutType_SEGMENT_ROLLOUT_TYPE]))
- for _, rollout := range rolloutsByType[flipt.RolloutType_SEGMENT_ROLLOUT_TYPE] {
- allRuleIds = append(allRuleIds, rollout.Id)
- }
-
- rows, err := s.builder.Select("rs.rollout_id, rs.\"value\", rs.segment_operator, rsr.segment_key").
- From("rollout_segments AS rs").
- Join("rollout_segment_references AS rsr ON (rs.id = rsr.rollout_segment_id)").
- Where(sq.Eq{"rollout_id": allRuleIds}).
- QueryContext(ctx)
- if err != nil {
- return results, err
- }
-
- defer func() {
- if cerr := rows.Close(); cerr != nil && err == nil {
- err = cerr
- }
- }()
-
- type intermediateValues struct {
- segmentKeys []string
- segmentOperator flipt.SegmentOperator
- value bool
- }
-
- intermediate := make(map[string]*intermediateValues)
-
- for rows.Next() {
- var (
- rolloutId string
- segmentKey string
- value bool
- segmentOperator flipt.SegmentOperator
- )
-
- if err := rows.Scan(&rolloutId, &value, &segmentOperator, &segmentKey); err != nil {
- return results, err
- }
-
- rs, ok := intermediate[rolloutId]
- if ok {
- rs.segmentKeys = append(rs.segmentKeys, segmentKey)
- } else {
- intermediate[rolloutId] = &intermediateValues{
- segmentKeys: []string{segmentKey},
- segmentOperator: segmentOperator,
- value: value,
- }
- }
- }
-
- for k, v := range intermediate {
- rollout := rolloutsById[k]
- rs := &flipt.RolloutSegment{}
-
- if len(v.segmentKeys) == 1 {
- rs.SegmentKey = v.segmentKeys[0]
- } else {
- rs.SegmentKeys = v.segmentKeys
- }
-
- rs.Value = v.value
- rs.SegmentOperator = v.segmentOperator
-
- rollout.Rule = &flipt.Rollout_Segment{Segment: rs}
- }
-
- if err := rows.Err(); err != nil {
- return results, err
- }
- }
-
- // get all rules from rollout_percentage_rules table
- if len(rolloutsByType[flipt.RolloutType_THRESHOLD_ROLLOUT_TYPE]) > 0 {
- allRuleIds := make([]string, 0, len(rolloutsByType[flipt.RolloutType_THRESHOLD_ROLLOUT_TYPE]))
- for _, rollout := range rolloutsByType[flipt.RolloutType_THRESHOLD_ROLLOUT_TYPE] {
- allRuleIds = append(allRuleIds, rollout.Id)
- }
-
- rows, err := s.builder.Select("rollout_id, percentage, \"value\"").
- From(tableRolloutPercentages).
- Where(sq.Eq{"rollout_id": allRuleIds}).
- QueryContext(ctx)
- if err != nil {
- return results, err
- }
-
- defer func() {
- if cerr := rows.Close(); cerr != nil && err == nil {
- err = cerr
- }
- }()
-
- for rows.Next() {
- var (
- rolloutId string
- rule = &flipt.RolloutThreshold{}
- )
-
- if err := rows.Scan(&rolloutId, &rule.Percentage, &rule.Value); err != nil {
- return results, err
- }
-
- rollout := rolloutsById[rolloutId]
- rollout.Rule = &flipt.Rollout_Threshold{Threshold: rule}
- }
-
- if err := rows.Err(); err != nil {
- return results, err
- }
- }
-
- var next *flipt.Rollout
-
- if len(rollouts) > int(req.QueryParams.Limit) && req.QueryParams.Limit > 0 {
- next = rollouts[len(rollouts)-1]
- rollouts = rollouts[:req.QueryParams.Limit]
- }
-
- results.Results = rollouts
-
- if next != nil {
- out, err := json.Marshal(PageToken{Key: next.Id, Offset: offset + uint64(len(rollouts))})
- if err != nil {
- return results, fmt.Errorf("encoding page token %w", err)
- }
- results.NextPageToken = base64.StdEncoding.EncodeToString(out)
- }
-
- return results, nil
-}
-
-// CountRollouts counts all rollouts
-func (s *Store) CountRollouts(ctx context.Context, flag storage.ResourceRequest) (uint64, error) {
- var count uint64
-
- if err := s.builder.Select("COUNT(*)").
- From(tableRollouts).
- Where(sq.Eq{"namespace_key": flag.Namespace(), "flag_key": flag.Key}).
- QueryRowContext(ctx).
- Scan(&count); err != nil {
- return 0, err
- }
-
- return count, nil
-}
-
-func (s *Store) CreateRollout(ctx context.Context, r *flipt.CreateRolloutRequest) (_ *flipt.Rollout, err error) {
- defer func() {
- if err == nil {
- err = s.setVersion(ctx, r.NamespaceKey)
- }
- }()
-
- if r.NamespaceKey == "" {
- r.NamespaceKey = storage.DefaultNamespace
- }
-
- var count uint64
-
- if err := s.builder.Select("COUNT(*)").
- From(tableRollouts).
- Where(sq.And{sq.Eq{"namespace_key": r.NamespaceKey}, sq.Eq{"flag_key": r.FlagKey}, sq.Eq{"\"rank\"": r.Rank}}).
- QueryRowContext(ctx).
- Scan(&count); err != nil {
- return nil, err
- }
-
- if count > 0 {
- return nil, errs.ErrInvalidf("rank number: %d already exists", r.Rank)
- }
-
- var (
- now = flipt.Now()
- rollout = &flipt.Rollout{
- Id: uuid.NewString(),
- NamespaceKey: r.NamespaceKey,
- FlagKey: r.FlagKey,
- Rank: r.Rank,
- Description: r.Description,
- CreatedAt: now,
- UpdatedAt: now,
- }
- )
-
- switch r.GetRule().(type) {
- case *flipt.CreateRolloutRequest_Segment:
- rollout.Type = flipt.RolloutType_SEGMENT_ROLLOUT_TYPE
- case *flipt.CreateRolloutRequest_Threshold:
- rollout.Type = flipt.RolloutType_THRESHOLD_ROLLOUT_TYPE
- case nil:
- return nil, errs.ErrInvalid("rollout rule is missing")
- default:
- return nil, errs.ErrInvalidf("invalid rollout rule type %T", r.GetRule())
- }
-
- tx, err := s.db.Begin()
- if err != nil {
- return nil, err
- }
-
- defer func() {
- if err != nil {
- _ = tx.Rollback()
- }
- }()
-
- if _, err := s.builder.Insert(tableRollouts).
- RunWith(tx).
- Columns("id", "namespace_key", "flag_key", "\"type\"", "\"rank\"", "description", "created_at", "updated_at").
- Values(rollout.Id, rollout.NamespaceKey, rollout.FlagKey, int32(rollout.Type), rollout.Rank, rollout.Description,
- &fliptsql.Timestamp{Timestamp: rollout.CreatedAt},
- &fliptsql.Timestamp{Timestamp: rollout.UpdatedAt},
- ).ExecContext(ctx); err != nil {
- return nil, err
- }
-
- switch r.GetRule().(type) {
- case *flipt.CreateRolloutRequest_Segment:
- rollout.Type = flipt.RolloutType_SEGMENT_ROLLOUT_TYPE
- rolloutSegmentId := uuid.NewString()
-
- segmentRule := r.GetSegment()
-
- segmentKeys := sanitizeSegmentKeys(segmentRule.GetSegmentKey(), segmentRule.GetSegmentKeys())
-
- segmentOperator := segmentRule.SegmentOperator
- if len(segmentKeys) == 1 {
- segmentOperator = flipt.SegmentOperator_OR_SEGMENT_OPERATOR
- }
-
- if _, err := s.builder.Insert(tableRolloutSegments).
- RunWith(tx).
- Columns("id", "rollout_id", "\"value\"", "segment_operator").
- Values(rolloutSegmentId, rollout.Id, segmentRule.Value, int32(segmentOperator)).
- ExecContext(ctx); err != nil {
- return nil, err
- }
-
- for _, segmentKey := range segmentKeys {
- if _, err := s.builder.Insert(tableRolloutSegmentReferences).
- RunWith(tx).
- Columns("rollout_segment_id", "namespace_key", "segment_key").
- Values(rolloutSegmentId, rollout.NamespaceKey, segmentKey).
- ExecContext(ctx); err != nil {
- return nil, err
- }
- }
-
- innerSegment := &flipt.RolloutSegment{
- Value: segmentRule.Value,
- SegmentOperator: segmentOperator,
- }
-
- if len(segmentKeys) == 1 {
- innerSegment.SegmentKey = segmentKeys[0]
- } else {
- innerSegment.SegmentKeys = segmentKeys
- }
-
- rollout.Rule = &flipt.Rollout_Segment{
- Segment: innerSegment,
- }
- case *flipt.CreateRolloutRequest_Threshold:
- rollout.Type = flipt.RolloutType_THRESHOLD_ROLLOUT_TYPE
-
- thresholdRule := r.GetThreshold()
-
- if _, err := s.builder.Insert(tableRolloutPercentages).
- RunWith(tx).
- Columns("id", "rollout_id", "namespace_key", "percentage", "\"value\"").
- Values(uuid.NewString(), rollout.Id, rollout.NamespaceKey, thresholdRule.Percentage, thresholdRule.Value).
- ExecContext(ctx); err != nil {
- return nil, err
- }
-
- rollout.Rule = &flipt.Rollout_Threshold{
- Threshold: thresholdRule,
- }
- default:
- return nil, fmt.Errorf("invalid rollout rule type %v", rollout.Type)
- }
-
- return rollout, tx.Commit()
-}
-
-func (s *Store) UpdateRollout(ctx context.Context, r *flipt.UpdateRolloutRequest) (_ *flipt.Rollout, err error) {
- defer func() {
- if err == nil {
- err = s.setVersion(ctx, r.NamespaceKey)
- }
- }()
-
- if r.NamespaceKey == "" {
- r.NamespaceKey = storage.DefaultNamespace
- }
-
- if r.Id == "" {
- return nil, errs.ErrInvalid("rollout ID not supplied")
- }
-
- tx, err := s.db.Begin()
- if err != nil {
- return nil, err
- }
-
- defer func() {
- if err != nil {
- _ = tx.Rollback()
- }
- }()
-
- ns := storage.NewNamespace(r.NamespaceKey)
- // get current state for rollout
- rollout, err := getRollout(ctx, s.builder.RunWith(tx), ns, r.Id)
- if err != nil {
- return nil, err
- }
-
- whereClause := sq.And{sq.Eq{"id": r.Id}, sq.Eq{"flag_key": r.FlagKey}, sq.Eq{"namespace_key": r.NamespaceKey}}
-
- query := s.builder.Update(tableRollouts).
- RunWith(tx).
- Set("description", r.Description).
- Set("updated_at", &fliptsql.Timestamp{Timestamp: flipt.Now()}).
- Where(whereClause)
-
- res, err := query.ExecContext(ctx)
- if err != nil {
- return nil, err
- }
-
- count, err := res.RowsAffected()
- if err != nil {
- return nil, err
- }
-
- if count != 1 {
- return nil, errs.ErrNotFoundf(`rollout "%s/%s"`, r.NamespaceKey, r.Id)
- }
-
- switch r.Rule.(type) {
- case *flipt.UpdateRolloutRequest_Segment:
- // enforce that rollout type is consistent with the DB
- if err := ensureRolloutType(rollout, flipt.RolloutType_SEGMENT_ROLLOUT_TYPE); err != nil {
- return nil, err
- }
-
- segmentRule := r.GetSegment()
-
- segmentKeys := sanitizeSegmentKeys(segmentRule.GetSegmentKey(), segmentRule.GetSegmentKeys())
-
- segmentOperator := segmentRule.SegmentOperator
- if len(segmentKeys) == 1 {
- segmentOperator = flipt.SegmentOperator_OR_SEGMENT_OPERATOR
- }
-
- if _, err := s.builder.Update(tableRolloutSegments).
- RunWith(tx).
- Set("segment_operator", segmentOperator).
- Set("value", segmentRule.Value).
- Where(sq.Eq{"rollout_id": r.Id}).ExecContext(ctx); err != nil {
- return nil, err
- }
-
- // Delete and reinsert rollout_segment_references.
- row := s.builder.Select("id").
- RunWith(tx).
- From(tableRolloutSegments).
- Where(sq.Eq{"rollout_id": r.Id}).
- Limit(1).
- QueryRowContext(ctx)
-
- var rolloutSegmentId string
-
- if err := row.Scan(&rolloutSegmentId); err != nil {
- return nil, err
- }
-
- if _, err := s.builder.Delete(tableRolloutSegmentReferences).
- RunWith(tx).
- Where(sq.And{sq.Eq{"rollout_segment_id": rolloutSegmentId}, sq.Eq{"namespace_key": r.NamespaceKey}}).
- ExecContext(ctx); err != nil {
- return nil, err
- }
-
- for _, segmentKey := range segmentKeys {
- if _, err := s.builder.
- Insert(tableRolloutSegmentReferences).
- RunWith(tx).
- Columns("rollout_segment_id", "namespace_key", "segment_key").
- Values(
- rolloutSegmentId,
- r.NamespaceKey,
- segmentKey,
- ).
- ExecContext(ctx); err != nil {
- return nil, err
- }
- }
-
- case *flipt.UpdateRolloutRequest_Threshold:
- // enforce that rollout type is consistent with the DB
- if err := ensureRolloutType(rollout, flipt.RolloutType_THRESHOLD_ROLLOUT_TYPE); err != nil {
- return nil, err
- }
-
- thresholdRule := r.GetThreshold()
-
- if _, err := s.builder.Update(tableRolloutPercentages).
- RunWith(tx).
- Set("percentage", thresholdRule.Percentage).
- Set("value", thresholdRule.Value).
- Where(sq.Eq{"rollout_id": r.Id}).ExecContext(ctx); err != nil {
- return nil, err
- }
- default:
- return nil, errs.InvalidFieldError("rule", "invalid rollout rule type")
- }
-
- if err = tx.Commit(); err != nil {
- return nil, err
- }
-
- rollout, err = getRollout(ctx, s.builder, ns, r.Id)
- if err != nil {
- return nil, err
- }
-
- return rollout, nil
-}
-
-func ensureRolloutType(rollout *flipt.Rollout, typ flipt.RolloutType) error {
- if rollout.Type == typ {
- return nil
- }
-
- return errs.ErrInvalidf(
- "cannot change type of rollout: have %q attempted %q",
- rollout.Type,
- typ,
- )
-}
-
-func (s *Store) DeleteRollout(ctx context.Context, r *flipt.DeleteRolloutRequest) (err error) {
- defer func() {
- if err == nil {
- err = s.setVersion(ctx, r.NamespaceKey)
- }
- }()
-
- if r.NamespaceKey == "" {
- r.NamespaceKey = storage.DefaultNamespace
- }
-
- tx, err := s.db.Begin()
- if err != nil {
- return err
- }
-
- _, err = s.builder.Delete(tableRollouts).
- RunWith(tx).
- Where(sq.And{sq.Eq{"id": r.Id}, sq.Eq{"flag_key": r.FlagKey}, sq.Eq{"namespace_key": r.NamespaceKey}}).
- ExecContext(ctx)
- if err != nil {
- _ = tx.Rollback()
- return err
- }
-
- // reorder existing rollouts after deletion
- rows, err := s.builder.Select("id").
- RunWith(tx).
- From(tableRollouts).
- Where(sq.And{sq.Eq{"namespace_key": r.NamespaceKey}, sq.Eq{"flag_key": r.FlagKey}}).
- OrderBy("\"rank\" ASC").
- QueryContext(ctx)
- if err != nil {
- _ = tx.Rollback()
- return err
- }
-
- defer func() {
- if cerr := rows.Close(); cerr != nil && err == nil {
- _ = tx.Rollback()
- err = cerr
- }
- }()
-
- var rolloutIDs []string
-
- for rows.Next() {
- var rolloutID string
-
- if err := rows.Scan(&rolloutID); err != nil {
- _ = tx.Rollback()
- return err
- }
-
- rolloutIDs = append(rolloutIDs, rolloutID)
- }
-
- if err := rows.Err(); err != nil {
- _ = tx.Rollback()
- return err
- }
-
- if err := s.orderRollouts(ctx, tx, r.NamespaceKey, r.FlagKey, rolloutIDs); err != nil {
- _ = tx.Rollback()
- return err
- }
-
- return tx.Commit()
-}
-
-// OrderRollouts orders rollouts
-func (s *Store) OrderRollouts(ctx context.Context, r *flipt.OrderRolloutsRequest) (err error) {
- defer func() {
- if err == nil {
- err = s.setVersion(ctx, r.NamespaceKey)
- }
- }()
-
- if r.NamespaceKey == "" {
- r.NamespaceKey = storage.DefaultNamespace
- }
-
- tx, err := s.db.Begin()
- if err != nil {
- return err
- }
-
- if err := s.orderRollouts(ctx, tx, r.NamespaceKey, r.FlagKey, r.RolloutIds); err != nil {
- _ = tx.Rollback()
- return err
- }
-
- return tx.Commit()
-}
-
-func (s *Store) orderRollouts(ctx context.Context, runner sq.BaseRunner, namespaceKey, flagKey string, rolloutIDs []string) error {
- updatedAt := flipt.Now()
-
- for i, id := range rolloutIDs {
- _, err := s.builder.Update(tableRollouts).
- RunWith(runner).
- Set("\"rank\"", i+1).
- Set("updated_at", &fliptsql.Timestamp{Timestamp: updatedAt}).
- Where(sq.And{sq.Eq{"id": id}, sq.Eq{"namespace_key": namespaceKey}, sq.Eq{"flag_key": flagKey}}).
- ExecContext(ctx)
- if err != nil {
- return err
- }
- }
-
- return nil
-}
diff --git a/internal/storage/sql/common/rule.go b/internal/storage/sql/common/rule.go
deleted file mode 100644
index ec508b96af..0000000000
--- a/internal/storage/sql/common/rule.go
+++ /dev/null
@@ -1,770 +0,0 @@
-package common
-
-import (
- "context"
- "database/sql"
- "encoding/base64"
- "encoding/json"
- "errors"
- "fmt"
-
- sq "github.com/Masterminds/squirrel"
- "github.com/google/uuid"
-
- errs "go.flipt.io/flipt/errors"
- "go.flipt.io/flipt/internal/storage"
- fliptsql "go.flipt.io/flipt/internal/storage/sql"
- flipt "go.flipt.io/flipt/rpc/flipt"
-)
-
-// GetRule gets an individual rule with distributions by ID
-func (s *Store) GetRule(ctx context.Context, ns storage.NamespaceRequest, id string) (*flipt.Rule, error) {
- var (
- createdAt fliptsql.Timestamp
- updatedAt fliptsql.Timestamp
-
- rule = &flipt.Rule{}
-
- err = s.builder.Select("id, namespace_key, flag_key, \"rank\", segment_operator, created_at, updated_at").
- From("rules").
- Where(sq.Eq{"id": id, "namespace_key": ns.Namespace()}).
- QueryRowContext(ctx).
- Scan(&rule.Id, &rule.NamespaceKey, &rule.FlagKey, &rule.Rank, &rule.SegmentOperator, &createdAt, &updatedAt)
- )
-
- if err != nil {
- if errors.Is(err, sql.ErrNoRows) {
- return nil, errs.ErrNotFoundf(`rule "%s/%s"`, ns.Namespace(), id)
- }
-
- return nil, err
- }
-
- segmentRows, err := s.builder.Select("segment_key").
- From("rule_segments").
- Where(sq.Eq{"rule_id": rule.Id}).
- QueryContext(ctx)
-
- defer func() {
- if cerr := segmentRows.Close(); cerr != nil && err == nil {
- err = cerr
- }
- }()
-
- segmentKeys := make([]string, 0)
- for segmentRows.Next() {
- var segmentKey string
-
- if err := segmentRows.Scan(&segmentKey); err != nil {
- return nil, err
- }
-
- segmentKeys = append(segmentKeys, segmentKey)
- }
-
- if err := segmentRows.Err(); err != nil {
- return nil, err
- }
-
- if err := segmentRows.Close(); err != nil {
- return nil, err
- }
-
- if len(segmentKeys) == 1 {
- rule.SegmentKey = segmentKeys[0]
- } else {
- rule.SegmentKeys = segmentKeys
- }
-
- rule.CreatedAt = createdAt.Timestamp
- rule.UpdatedAt = updatedAt.Timestamp
-
- query := s.builder.Select("id", "rule_id", "variant_id", "rollout", "created_at", "updated_at").
- From("distributions").
- Where(sq.Eq{"rule_id": rule.Id}).
- OrderBy("created_at ASC")
-
- rows, err := query.QueryContext(ctx)
- if err != nil {
- return rule, err
- }
-
- defer func() {
- if cerr := rows.Close(); cerr != nil && err == nil {
- err = cerr
- }
- }()
-
- for rows.Next() {
- var (
- distribution flipt.Distribution
- createdAt, updatedAt fliptsql.Timestamp
- )
-
- if err := rows.Scan(
- &distribution.Id,
- &distribution.RuleId,
- &distribution.VariantId,
- &distribution.Rollout,
- &createdAt,
- &updatedAt); err != nil {
- return rule, err
- }
-
- distribution.CreatedAt = createdAt.Timestamp
- distribution.UpdatedAt = updatedAt.Timestamp
-
- rule.Distributions = append(rule.Distributions, &distribution)
- }
-
- return rule, rows.Err()
-}
-
-type optionalDistribution struct {
- Id sql.NullString
- RuleId sql.NullString
- VariantId sql.NullString
- Rollout sql.NullFloat64
- CreatedAt fliptsql.NullableTimestamp
- UpdatedAt fliptsql.NullableTimestamp
-}
-
-// ListRules gets all rules for a flag with distributions
-func (s *Store) ListRules(ctx context.Context, req *storage.ListRequest[storage.ResourceRequest]) (storage.ResultSet[*flipt.Rule], error) {
- var (
- rules []*flipt.Rule
- results = storage.ResultSet[*flipt.Rule]{}
-
- query = s.builder.Select("id, namespace_key, flag_key, \"rank\", segment_operator, created_at, updated_at").
- From("rules").
- Where(sq.Eq{"flag_key": req.Predicate.Key, "namespace_key": req.Predicate.Namespace()}).
- OrderBy(fmt.Sprintf("\"rank\" %s", req.QueryParams.Order))
- )
-
- if req.QueryParams.Limit > 0 {
- query = query.Limit(req.QueryParams.Limit + 1)
- }
-
- var offset uint64
-
- if req.QueryParams.PageToken != "" {
- token, err := decodePageToken(s.logger, req.QueryParams.PageToken)
- if err != nil {
- return results, err
- }
-
- offset = token.Offset
- query = query.Offset(offset)
- } else if req.QueryParams.Offset > 0 {
- offset = req.QueryParams.Offset
- query = query.Offset(offset)
- }
-
- rows, err := query.QueryContext(ctx)
- if err != nil {
- return results, err
- }
-
- defer func() {
- if cerr := rows.Close(); cerr != nil && err == nil {
- err = cerr
- }
- }()
-
- rulesById := map[string]*flipt.Rule{}
- for rows.Next() {
- var (
- rule = &flipt.Rule{}
- rCreatedAt fliptsql.Timestamp
- rUpdatedAt fliptsql.Timestamp
- )
-
- if err := rows.Scan(
- &rule.Id,
- &rule.NamespaceKey,
- &rule.FlagKey,
- &rule.Rank,
- &rule.SegmentOperator,
- &rCreatedAt,
- &rUpdatedAt,
- ); err != nil {
- return results, err
- }
-
- rule.CreatedAt = rCreatedAt.Timestamp
- rule.UpdatedAt = rUpdatedAt.Timestamp
-
- rules = append(rules, rule)
- rulesById[rule.Id] = rule
- }
-
- if err := rows.Err(); err != nil {
- return results, err
- }
-
- if err := rows.Close(); err != nil {
- return results, err
- }
-
- // For each rule, find the segment keys and add them to the rule proto definition.
- for _, r := range rules {
- // Since we are querying within a loop, we do not want to defer within the loop
- // so we will disable the sqlclosecheck linter.
- segmentRows, err := s.builder.Select("segment_key").
- From("rule_segments").
- Where(sq.Eq{"rule_id": r.Id}).
- QueryContext(ctx)
- if err != nil {
- //nolint:sqlclosecheck
- _ = segmentRows.Close()
- return results, err
- }
-
- segmentKeys := make([]string, 0)
- for segmentRows.Next() {
- var segmentKey string
- if err := segmentRows.Scan(&segmentKey); err != nil {
- return results, err
- }
-
- segmentKeys = append(segmentKeys, segmentKey)
- }
-
- if err := segmentRows.Err(); err != nil {
- return results, err
- }
-
- //nolint:sqlclosecheck
- if err := segmentRows.Close(); err != nil {
- return results, err
- }
-
- if len(segmentKeys) == 1 {
- r.SegmentKey = segmentKeys[0]
- } else {
- r.SegmentKeys = segmentKeys
- }
- }
-
- if err := s.setDistributions(ctx, rulesById); err != nil {
- return results, err
- }
-
- var next *flipt.Rule
-
- if len(rules) > int(req.QueryParams.Limit) && req.QueryParams.Limit > 0 {
- next = rules[len(rules)-1]
- rules = rules[:req.QueryParams.Limit]
- }
-
- results.Results = rules
-
- if next != nil {
- out, err := json.Marshal(PageToken{Key: next.Id, Offset: offset + uint64(len(rules))})
- if err != nil {
- return results, fmt.Errorf("encoding page token %w", err)
- }
- results.NextPageToken = base64.StdEncoding.EncodeToString(out)
- }
-
- return results, nil
-}
-
-func (s *Store) setDistributions(ctx context.Context, rulesById map[string]*flipt.Rule) error {
- allRuleIds := make([]string, 0, len(rulesById))
- for k := range rulesById {
- allRuleIds = append(allRuleIds, k)
- }
-
- query := s.builder.Select("id, rule_id, variant_id, rollout, created_at, updated_at").
- From("distributions").
- Where(sq.Eq{"rule_id": allRuleIds}).
- OrderBy("created_at")
-
- rows, err := query.QueryContext(ctx)
- if err != nil {
- return err
- }
-
- defer func() {
- if cerr := rows.Close(); cerr != nil && err == nil {
- err = cerr
- }
- }()
-
- for rows.Next() {
- var (
- distribution optionalDistribution
- dCreatedAt fliptsql.NullableTimestamp
- dUpdatedAt fliptsql.NullableTimestamp
- )
-
- if err := rows.Scan(
- &distribution.Id,
- &distribution.RuleId,
- &distribution.VariantId,
- &distribution.Rollout,
- &dCreatedAt,
- &dUpdatedAt,
- ); err != nil {
- return err
- }
-
- if rule, ok := rulesById[distribution.RuleId.String]; ok {
- rule.Distributions = append(rule.Distributions, &flipt.Distribution{
- Id: distribution.Id.String,
- RuleId: distribution.RuleId.String,
- VariantId: distribution.VariantId.String,
- Rollout: float32(distribution.Rollout.Float64),
- CreatedAt: dCreatedAt.Timestamp,
- UpdatedAt: dUpdatedAt.Timestamp,
- })
- }
- }
-
- if err := rows.Err(); err != nil {
- return err
- }
-
- return rows.Close()
-}
-
-// CountRules counts all rules
-func (s *Store) CountRules(ctx context.Context, flag storage.ResourceRequest) (uint64, error) {
- var count uint64
-
- if err := s.builder.Select("COUNT(*)").
- From("rules").
- Where(sq.Eq{"namespace_key": flag.Namespace(), "flag_key": flag.Key}).
- QueryRowContext(ctx).
- Scan(&count); err != nil {
- return 0, err
- }
-
- return count, nil
-}
-
-// CreateRule creates a rule
-func (s *Store) CreateRule(ctx context.Context, r *flipt.CreateRuleRequest) (_ *flipt.Rule, err error) {
- defer func() {
- if err == nil {
- err = s.setVersion(ctx, r.NamespaceKey)
- }
- }()
-
- segmentKeys := sanitizeSegmentKeys(r.GetSegmentKey(), r.GetSegmentKeys())
-
- if r.NamespaceKey == "" {
- r.NamespaceKey = storage.DefaultNamespace
- }
-
- var (
- now = flipt.Now()
- rule = &flipt.Rule{
- Id: uuid.NewString(),
- NamespaceKey: r.NamespaceKey,
- FlagKey: r.FlagKey,
- Rank: r.Rank,
- SegmentOperator: r.SegmentOperator,
- CreatedAt: now,
- UpdatedAt: now,
- }
- )
-
- // Force segment operator to be OR when `segmentKeys` length is 1.
- if len(segmentKeys) == 1 {
- rule.SegmentOperator = flipt.SegmentOperator_OR_SEGMENT_OPERATOR
- }
-
- tx, err := s.db.Begin()
- if err != nil {
- return nil, err
- }
-
- defer func() {
- if err != nil {
- _ = tx.Rollback()
- }
- }()
-
- if _, err := s.builder.
- Insert("rules").
- RunWith(tx).
- Columns("id", "namespace_key", "flag_key", "\"rank\"", "segment_operator", "created_at", "updated_at").
- Values(
- rule.Id,
- rule.NamespaceKey,
- rule.FlagKey,
- rule.Rank,
- int32(rule.SegmentOperator),
- &fliptsql.Timestamp{Timestamp: rule.CreatedAt},
- &fliptsql.Timestamp{Timestamp: rule.UpdatedAt},
- ).
- ExecContext(ctx); err != nil {
- return nil, err
- }
-
- for _, segmentKey := range segmentKeys {
- if _, err := s.builder.
- Insert("rule_segments").
- RunWith(tx).
- Columns("rule_id", "namespace_key", "segment_key").
- Values(
- rule.Id,
- rule.NamespaceKey,
- segmentKey,
- ).
- ExecContext(ctx); err != nil {
- return nil, err
- }
- }
-
- if len(segmentKeys) == 1 {
- rule.SegmentKey = segmentKeys[0]
- } else {
- rule.SegmentKeys = segmentKeys
- }
-
- return rule, tx.Commit()
-}
-
-// UpdateRule updates an existing rule
-func (s *Store) UpdateRule(ctx context.Context, r *flipt.UpdateRuleRequest) (_ *flipt.Rule, err error) {
- defer func() {
- if err == nil {
- err = s.setVersion(ctx, r.NamespaceKey)
- }
- }()
-
- segmentKeys := sanitizeSegmentKeys(r.GetSegmentKey(), r.GetSegmentKeys())
-
- if r.NamespaceKey == "" {
- r.NamespaceKey = storage.DefaultNamespace
- }
-
- tx, err := s.db.Begin()
- if err != nil {
- return nil, err
- }
-
- defer func() {
- if err != nil {
- _ = tx.Rollback()
- }
- }()
-
- segmentOperator := r.SegmentOperator
- if len(segmentKeys) == 1 {
- segmentOperator = flipt.SegmentOperator_OR_SEGMENT_OPERATOR
- }
-
- // Set segment operator.
- _, err = s.builder.Update("rules").
- RunWith(tx).
- Set("segment_operator", segmentOperator).
- Set("updated_at", &fliptsql.Timestamp{Timestamp: flipt.Now()}).
- Where(sq.Eq{"id": r.Id, "namespace_key": r.NamespaceKey, "flag_key": r.FlagKey}).
- ExecContext(ctx)
- if err != nil {
- return nil, err
- }
-
- // Delete and reinsert segmentKeys.
- if _, err = s.builder.Delete("rule_segments").
- RunWith(tx).
- Where(sq.And{sq.Eq{"rule_id": r.Id}, sq.Eq{"namespace_key": r.NamespaceKey}}).
- ExecContext(ctx); err != nil {
- return nil, err
- }
-
- for _, segmentKey := range segmentKeys {
- if _, err := s.builder.
- Insert("rule_segments").
- RunWith(tx).
- Columns("rule_id", "namespace_key", "segment_key").
- Values(
- r.Id,
- r.NamespaceKey,
- segmentKey,
- ).
- ExecContext(ctx); err != nil {
- return nil, err
- }
- }
-
- if err = tx.Commit(); err != nil {
- return nil, err
- }
-
- return s.GetRule(ctx, storage.NewNamespace(r.NamespaceKey), r.Id)
-}
-
-// DeleteRule deletes a rule
-func (s *Store) DeleteRule(ctx context.Context, r *flipt.DeleteRuleRequest) (err error) {
- defer func() {
- if err == nil {
- err = s.setVersion(ctx, r.NamespaceKey)
- }
- }()
-
- if r.NamespaceKey == "" {
- r.NamespaceKey = storage.DefaultNamespace
- }
-
- tx, err := s.db.Begin()
- if err != nil {
- return err
- }
-
- // delete rule
- _, err = s.builder.Delete("rules").
- RunWith(tx).
- Where(sq.And{sq.Eq{"id": r.Id}, sq.Eq{"namespace_key": r.NamespaceKey}, sq.Eq{"flag_key": r.FlagKey}}).
- ExecContext(ctx)
- if err != nil {
- _ = tx.Rollback()
- return err
- }
-
- // reorder existing rules after deletion
- rows, err := s.builder.Select("id").
- RunWith(tx).
- From("rules").
- Where(sq.And{sq.Eq{"namespace_key": r.NamespaceKey}, sq.Eq{"flag_key": r.FlagKey}}).
- OrderBy("\"rank\" ASC").
- QueryContext(ctx)
- if err != nil {
- _ = tx.Rollback()
- return err
- }
-
- defer func() {
- if cerr := rows.Close(); cerr != nil && err == nil {
- _ = tx.Rollback()
- err = cerr
- }
- }()
-
- var ruleIDs []string
-
- for rows.Next() {
- var ruleID string
-
- if err := rows.Scan(&ruleID); err != nil {
- _ = tx.Rollback()
- return err
- }
-
- ruleIDs = append(ruleIDs, ruleID)
- }
-
- if err := rows.Err(); err != nil {
- _ = tx.Rollback()
- return err
- }
-
- if err := s.orderRules(ctx, tx, r.NamespaceKey, r.FlagKey, ruleIDs); err != nil {
- _ = tx.Rollback()
- return err
- }
-
- return tx.Commit()
-}
-
-// OrderRules orders rules
-func (s *Store) OrderRules(ctx context.Context, r *flipt.OrderRulesRequest) (err error) {
- defer func() {
- if err == nil {
- err = s.setVersion(ctx, r.NamespaceKey)
- }
- }()
-
- if r.NamespaceKey == "" {
- r.NamespaceKey = storage.DefaultNamespace
- }
-
- tx, err := s.db.Begin()
- if err != nil {
- return err
- }
-
- if err := s.orderRules(ctx, tx, r.NamespaceKey, r.FlagKey, r.RuleIds); err != nil {
- _ = tx.Rollback()
- return err
- }
-
- return tx.Commit()
-}
-
-func (s *Store) orderRules(ctx context.Context, runner sq.BaseRunner, namespaceKey, flagKey string, ruleIDs []string) error {
- updatedAt := flipt.Now()
-
- for i, id := range ruleIDs {
- _, err := s.builder.Update("rules").
- RunWith(runner).
- Set("\"rank\"", i+1).
- Set("updated_at", &fliptsql.Timestamp{Timestamp: updatedAt}).
- Where(sq.And{sq.Eq{"id": id}, sq.Eq{"namespace_key": namespaceKey}, sq.Eq{"flag_key": flagKey}}).
- ExecContext(ctx)
- if err != nil {
- return err
- }
- }
-
- return nil
-}
-
-func (s *Store) distributionValidationHelper(ctx context.Context, distributionRequest interface {
- GetFlagKey() string
- GetNamespaceKey() string
- GetVariantId() string
- GetRuleId() string
-},
-) error {
- var count int
- if err := s.builder.Select("COUNT(*)").
- From("rules").
- Join("variants USING (namespace_key)").
- Join("flags USING (namespace_key)").
- Where(sq.Eq{
- "namespace_key": distributionRequest.GetNamespaceKey(),
- "rules.id": distributionRequest.GetRuleId(),
- "variants.id": distributionRequest.GetVariantId(),
- "flags.\"key\"": distributionRequest.GetFlagKey(),
- }).
- QueryRowContext(ctx).
- Scan(&count); err != nil {
- return err
- }
-
- if count < 1 {
- return sql.ErrNoRows
- }
-
- return nil
-}
-
-// CreateDistribution creates a distribution
-func (s *Store) CreateDistribution(ctx context.Context, r *flipt.CreateDistributionRequest) (_ *flipt.Distribution, err error) {
- defer func() {
- if err == nil {
- err = s.setVersion(ctx, r.NamespaceKey)
- }
- }()
-
- if r.NamespaceKey == "" {
- r.NamespaceKey = storage.DefaultNamespace
- }
-
- var (
- now = flipt.Now()
- d = &flipt.Distribution{
- Id: uuid.NewString(),
- RuleId: r.RuleId,
- VariantId: r.VariantId,
- Rollout: r.Rollout,
- CreatedAt: now,
- UpdatedAt: now,
- }
- )
-
- err = s.distributionValidationHelper(ctx, r)
- if err != nil {
- if errors.Is(err, sql.ErrNoRows) {
- return nil, errs.ErrNotFoundf("variant %q, rule %q, flag %q in namespace %q", r.VariantId, r.RuleId, r.FlagKey, r.NamespaceKey)
- }
- return nil, err
- }
-
- if _, err = s.builder.
- Insert("distributions").
- Columns("id", "rule_id", "variant_id", "rollout", "created_at", "updated_at").
- Values(
- d.Id,
- d.RuleId,
- d.VariantId,
- d.Rollout,
- &fliptsql.Timestamp{Timestamp: d.CreatedAt},
- &fliptsql.Timestamp{Timestamp: d.UpdatedAt}).
- ExecContext(ctx); err != nil {
- return nil, err
- }
-
- return d, nil
-}
-
-// UpdateDistribution updates an existing distribution
-func (s *Store) UpdateDistribution(ctx context.Context, r *flipt.UpdateDistributionRequest) (_ *flipt.Distribution, err error) {
- defer func() {
- if err == nil {
- err = s.setVersion(ctx, r.NamespaceKey)
- }
- }()
-
- if r.NamespaceKey == "" {
- r.NamespaceKey = storage.DefaultNamespace
- }
-
- err = s.distributionValidationHelper(ctx, r)
- if err != nil {
- if errors.Is(err, sql.ErrNoRows) {
- return nil, errs.ErrNotFoundf("variant %q, rule %q, flag %q in namespace %q", r.VariantId, r.RuleId, r.FlagKey, r.NamespaceKey)
- }
- return nil, err
- }
-
- query := s.builder.Update("distributions").
- Set("rollout", r.Rollout).
- Set("variant_id", r.VariantId).
- Set("updated_at", &fliptsql.Timestamp{Timestamp: flipt.Now()}).
- Where(sq.Eq{"id": r.Id, "rule_id": r.RuleId})
-
- res, err := query.ExecContext(ctx)
- if err != nil {
- return nil, err
- }
-
- count, err := res.RowsAffected()
- if err != nil {
- return nil, err
- }
-
- if count != 1 {
- return nil, errs.ErrNotFoundf("distribution %q", r.Id)
- }
-
- var (
- createdAt fliptsql.Timestamp
- updatedAt fliptsql.Timestamp
-
- distribution = &flipt.Distribution{}
- )
-
- if err := s.builder.Select("id, rule_id, variant_id, rollout, created_at, updated_at").
- From("distributions").
- Where(sq.And{sq.Eq{"id": r.Id}, sq.Eq{"rule_id": r.RuleId}, sq.Eq{"variant_id": r.VariantId}}).
- QueryRowContext(ctx).
- Scan(&distribution.Id, &distribution.RuleId, &distribution.VariantId, &distribution.Rollout, &createdAt, &updatedAt); err != nil {
- return nil, err
- }
-
- distribution.CreatedAt = createdAt.Timestamp
- distribution.UpdatedAt = updatedAt.Timestamp
-
- return distribution, nil
-}
-
-// DeleteDistribution deletes a distribution
-func (s *Store) DeleteDistribution(ctx context.Context, r *flipt.DeleteDistributionRequest) (err error) {
- defer func() {
- if err == nil {
- err = s.setVersion(ctx, r.NamespaceKey)
- }
- }()
-
- _, err = s.builder.Delete("distributions").
- Where(sq.And{sq.Eq{"id": r.Id}, sq.Eq{"rule_id": r.RuleId}, sq.Eq{"variant_id": r.VariantId}}).
- ExecContext(ctx)
-
- return err
-}
diff --git a/internal/storage/sql/common/segment.go b/internal/storage/sql/common/segment.go
deleted file mode 100644
index 31228b10e3..0000000000
--- a/internal/storage/sql/common/segment.go
+++ /dev/null
@@ -1,531 +0,0 @@
-package common
-
-import (
- "context"
- "database/sql"
- "encoding/base64"
- "encoding/json"
- "errors"
- "fmt"
- "strings"
-
- sq "github.com/Masterminds/squirrel"
- "github.com/google/uuid"
-
- errs "go.flipt.io/flipt/errors"
- "go.flipt.io/flipt/internal/storage"
- fliptsql "go.flipt.io/flipt/internal/storage/sql"
- flipt "go.flipt.io/flipt/rpc/flipt"
-)
-
-// GetSegment gets a segment
-func (s *Store) GetSegment(ctx context.Context, req storage.ResourceRequest) (*flipt.Segment, error) {
- var (
- createdAt fliptsql.Timestamp
- updatedAt fliptsql.Timestamp
-
- segment = &flipt.Segment{}
-
- err = s.builder.Select("namespace_key, \"key\", name, description, match_type, created_at, updated_at").
- From("segments").
- Where(sq.Eq{"namespace_key": req.Namespace(), "\"key\"": req.Key}).
- QueryRowContext(ctx).
- Scan(
- &segment.NamespaceKey,
- &segment.Key,
- &segment.Name,
- &segment.Description,
- &segment.MatchType,
- &createdAt,
- &updatedAt)
- )
-
- if err != nil {
- if errors.Is(err, sql.ErrNoRows) {
- return nil, errs.ErrNotFoundf("segment %q", req)
- }
-
- return nil, err
- }
-
- segment.CreatedAt = createdAt.Timestamp
- segment.UpdatedAt = updatedAt.Timestamp
-
- query := s.builder.Select("id, namespace_key, segment_key, type, property, operator, value, description, created_at, updated_at").
- From("constraints").
- Where(sq.And{sq.Eq{"namespace_key": segment.NamespaceKey}, sq.Eq{"segment_key": segment.Key}}).
- OrderBy("created_at ASC")
-
- rows, err := query.QueryContext(ctx)
- if err != nil {
- return segment, err
- }
-
- defer func() {
- if cerr := rows.Close(); cerr != nil && err == nil {
- err = cerr
- }
- }()
-
- for rows.Next() {
- var (
- constraint flipt.Constraint
- description sql.NullString
- createdAt, updatedAt fliptsql.Timestamp
- )
-
- if err := rows.Scan(
- &constraint.Id,
- &constraint.NamespaceKey,
- &constraint.SegmentKey,
- &constraint.Type,
- &constraint.Property,
- &constraint.Operator,
- &constraint.Value,
- &description,
- &createdAt,
- &updatedAt); err != nil {
- return segment, err
- }
-
- constraint.CreatedAt = createdAt.Timestamp
- constraint.UpdatedAt = updatedAt.Timestamp
- constraint.Description = description.String
- segment.Constraints = append(segment.Constraints, &constraint)
- }
-
- return segment, rows.Err()
-}
-
-type optionalConstraint struct {
- Id sql.NullString
- NamespaceKey sql.NullString
- SegmentKey sql.NullString
- Type sql.NullInt32
- Property sql.NullString
- Operator sql.NullString
- Value sql.NullString
- Description sql.NullString
- CreatedAt fliptsql.NullableTimestamp
- UpdatedAt fliptsql.NullableTimestamp
-}
-
-// ListSegments lists all segments
-func (s *Store) ListSegments(ctx context.Context, req *storage.ListRequest[storage.NamespaceRequest]) (storage.ResultSet[*flipt.Segment], error) {
- var (
- segments []*flipt.Segment
- results = storage.ResultSet[*flipt.Segment]{}
-
- query = s.builder.Select("namespace_key, \"key\", name, description, match_type, created_at, updated_at").
- From("segments").
- Where(sq.Eq{"namespace_key": req.Predicate.Namespace()}).
- OrderBy(fmt.Sprintf("created_at %s", req.QueryParams.Order))
- )
-
- if req.QueryParams.Limit > 0 {
- query = query.Limit(req.QueryParams.Limit + 1)
- }
-
- var offset uint64
-
- if req.QueryParams.PageToken != "" {
- token, err := decodePageToken(s.logger, req.QueryParams.PageToken)
- if err != nil {
- return results, err
- }
-
- offset = token.Offset
- query = query.Offset(offset)
- } else if req.QueryParams.Offset > 0 {
- offset = req.QueryParams.Offset
- query = query.Offset(offset)
- }
-
- rows, err := query.QueryContext(ctx)
- if err != nil {
- return results, err
- }
-
- defer func() {
- if cerr := rows.Close(); cerr != nil && err == nil {
- err = cerr
- }
- }()
-
- // keep track of segments we've seen so we don't append duplicates because of the join
- segmentsByKey := make(map[string]*flipt.Segment)
-
- for rows.Next() {
- var (
- segment = &flipt.Segment{}
- sCreatedAt fliptsql.Timestamp
- sUpdatedAt fliptsql.Timestamp
- )
-
- if err := rows.Scan(
- &segment.NamespaceKey,
- &segment.Key,
- &segment.Name,
- &segment.Description,
- &segment.MatchType,
- &sCreatedAt,
- &sUpdatedAt); err != nil {
- return results, err
- }
-
- segment.CreatedAt = sCreatedAt.Timestamp
- segment.UpdatedAt = sUpdatedAt.Timestamp
-
- segments = append(segments, segment)
- segmentsByKey[segment.Key] = segment
- }
-
- if err := rows.Err(); err != nil {
- return results, err
- }
-
- if err := rows.Close(); err != nil {
- return results, err
- }
-
- if err := s.setConstraints(ctx, req.Predicate.Namespace(), segmentsByKey); err != nil {
- return results, err
- }
-
- var next *flipt.Segment
-
- if len(segments) > int(req.QueryParams.Limit) && req.QueryParams.Limit > 0 {
- next = segments[len(segments)-1]
- segments = segments[:req.QueryParams.Limit]
- }
-
- results.Results = segments
-
- if next != nil {
- out, err := json.Marshal(PageToken{Key: next.Key, Offset: offset + uint64(len(segments))})
- if err != nil {
- return results, fmt.Errorf("encoding page token %w", err)
- }
- results.NextPageToken = base64.StdEncoding.EncodeToString(out)
- }
-
- return results, nil
-}
-
-func (s *Store) setConstraints(ctx context.Context, namespaceKey string, segmentsByKey map[string]*flipt.Segment) error {
- allSegmentKeys := make([]string, 0, len(segmentsByKey))
- for k := range segmentsByKey {
- allSegmentKeys = append(allSegmentKeys, k)
- }
-
- query := s.builder.Select("id, namespace_key, segment_key, type, property, operator, value, description, created_at, updated_at").
- From("constraints").
- Where(sq.Eq{"namespace_key": namespaceKey, "segment_key": allSegmentKeys}).
- OrderBy("created_at")
-
- rows, err := query.QueryContext(ctx)
- if err != nil {
- return err
- }
-
- defer func() {
- if cerr := rows.Close(); cerr != nil && err == nil {
- err = cerr
- }
- }()
-
- for rows.Next() {
- var (
- constraint optionalConstraint
- cCreatedAt fliptsql.NullableTimestamp
- cUpdatedAt fliptsql.NullableTimestamp
- )
-
- if err := rows.Scan(
- &constraint.Id,
- &constraint.NamespaceKey,
- &constraint.SegmentKey,
- &constraint.Type,
- &constraint.Property,
- &constraint.Operator,
- &constraint.Value,
- &constraint.Description,
- &cCreatedAt,
- &cUpdatedAt); err != nil {
- return err
- }
-
- if segment, ok := segmentsByKey[constraint.SegmentKey.String]; ok {
- segment.Constraints = append(segment.Constraints, &flipt.Constraint{
- Id: constraint.Id.String,
- NamespaceKey: constraint.NamespaceKey.String,
- SegmentKey: constraint.SegmentKey.String,
- Type: flipt.ComparisonType(constraint.Type.Int32),
- Property: constraint.Property.String,
- Operator: constraint.Operator.String,
- Value: constraint.Value.String,
- Description: constraint.Description.String,
- CreatedAt: cCreatedAt.Timestamp,
- UpdatedAt: cUpdatedAt.Timestamp,
- })
- }
- }
-
- if err := rows.Err(); err != nil {
- return err
- }
-
- return rows.Close()
-}
-
-// CountSegments counts all segments
-func (s *Store) CountSegments(ctx context.Context, ns storage.NamespaceRequest) (uint64, error) {
- var count uint64
-
- if err := s.builder.Select("COUNT(*)").
- From("segments").
- Where(sq.Eq{"namespace_key": ns.Namespace()}).
- QueryRowContext(ctx).
- Scan(&count); err != nil {
- return 0, err
- }
-
- return count, nil
-}
-
-// CreateSegment creates a segment
-func (s *Store) CreateSegment(ctx context.Context, r *flipt.CreateSegmentRequest) (_ *flipt.Segment, err error) {
- defer func() {
- if err == nil {
- err = s.setVersion(ctx, r.NamespaceKey)
- }
- }()
-
- if r.NamespaceKey == "" {
- r.NamespaceKey = storage.DefaultNamespace
- }
-
- var (
- now = flipt.Now()
- segment = &flipt.Segment{
- NamespaceKey: r.NamespaceKey,
- Key: r.Key,
- Name: r.Name,
- Description: r.Description,
- MatchType: r.MatchType,
- CreatedAt: now,
- UpdatedAt: now,
- }
- )
-
- if _, err := s.builder.Insert("segments").
- Columns("namespace_key", "\"key\"", "name", "description", "match_type", "created_at", "updated_at").
- Values(
- segment.NamespaceKey,
- segment.Key,
- segment.Name,
- segment.Description,
- int32(segment.MatchType),
- &fliptsql.Timestamp{Timestamp: segment.CreatedAt},
- &fliptsql.Timestamp{Timestamp: segment.UpdatedAt}).
- ExecContext(ctx); err != nil {
- return nil, err
- }
-
- return segment, nil
-}
-
-// UpdateSegment updates an existing segment
-func (s *Store) UpdateSegment(ctx context.Context, r *flipt.UpdateSegmentRequest) (_ *flipt.Segment, err error) {
- defer func() {
- if err == nil {
- err = s.setVersion(ctx, r.NamespaceKey)
- }
- }()
-
- if r.NamespaceKey == "" {
- r.NamespaceKey = storage.DefaultNamespace
- }
-
- query := s.builder.Update("segments").
- Set("name", r.Name).
- Set("description", r.Description).
- Set("match_type", r.MatchType).
- Set("updated_at", &fliptsql.Timestamp{Timestamp: flipt.Now()}).
- Where(sq.Eq{"namespace_key": r.NamespaceKey, "\"key\"": r.Key})
-
- res, err := query.ExecContext(ctx)
- if err != nil {
- return nil, err
- }
-
- count, err := res.RowsAffected()
- if err != nil {
- return nil, err
- }
-
- p := storage.NewResource(r.NamespaceKey, r.Key)
-
- if count != 1 {
- return nil, errs.ErrNotFoundf("segment %q", p)
- }
-
- return s.GetSegment(ctx, p)
-}
-
-// DeleteSegment deletes a segment
-func (s *Store) DeleteSegment(ctx context.Context, r *flipt.DeleteSegmentRequest) (err error) {
- defer func() {
- if err == nil {
- err = s.setVersion(ctx, r.NamespaceKey)
- }
- }()
-
- if r.NamespaceKey == "" {
- r.NamespaceKey = storage.DefaultNamespace
- }
-
- _, err = s.builder.Delete("segments").
- Where(sq.And{sq.Eq{"namespace_key": r.NamespaceKey}, sq.Eq{"\"key\"": r.Key}}).
- ExecContext(ctx)
-
- return err
-}
-
-// CreateConstraint creates a constraint
-func (s *Store) CreateConstraint(ctx context.Context, r *flipt.CreateConstraintRequest) (_ *flipt.Constraint, err error) {
- defer func() {
- if err == nil {
- err = s.setVersion(ctx, r.NamespaceKey)
- }
- }()
-
- if r.NamespaceKey == "" {
- r.NamespaceKey = storage.DefaultNamespace
- }
-
- var (
- operator = strings.ToLower(r.Operator)
- now = flipt.Now()
- c = &flipt.Constraint{
- Id: uuid.NewString(),
- NamespaceKey: r.NamespaceKey,
- SegmentKey: r.SegmentKey,
- Type: r.Type,
- Property: r.Property,
- Operator: operator,
- Value: r.Value,
- CreatedAt: now,
- UpdatedAt: now,
- Description: r.Description,
- }
- )
-
- // unset value if operator does not require it
- if _, ok := flipt.NoValueOperators[c.Operator]; ok {
- c.Value = ""
- }
-
- if _, err := s.builder.Insert("constraints").
- Columns("id", "namespace_key", "segment_key", "type", "property", "operator", "value", "description", "created_at", "updated_at").
- Values(
- c.Id,
- c.NamespaceKey,
- c.SegmentKey,
- int32(c.Type),
- c.Property,
- c.Operator,
- c.Value,
- c.Description,
- &fliptsql.Timestamp{Timestamp: c.CreatedAt},
- &fliptsql.Timestamp{Timestamp: c.UpdatedAt}).
- ExecContext(ctx); err != nil {
- return nil, err
- }
-
- return c, nil
-}
-
-// UpdateConstraint updates an existing constraint
-func (s *Store) UpdateConstraint(ctx context.Context, r *flipt.UpdateConstraintRequest) (_ *flipt.Constraint, err error) {
- defer func() {
- if err == nil {
- err = s.setVersion(ctx, r.NamespaceKey)
- }
- }()
-
- if r.NamespaceKey == "" {
- r.NamespaceKey = storage.DefaultNamespace
- }
-
- var (
- whereClause = sq.And{sq.Eq{"id": r.Id}, sq.Eq{"segment_key": r.SegmentKey}, sq.Eq{"namespace_key": r.NamespaceKey}}
- operator = strings.ToLower(r.Operator)
- )
-
- // unset value if operator does not require it
- if _, ok := flipt.NoValueOperators[operator]; ok {
- r.Value = ""
- }
-
- res, err := s.builder.Update("constraints").
- Set("type", r.Type).
- Set("property", r.Property).
- Set("operator", operator).
- Set("value", r.Value).
- Set("description", r.Description).
- Set("updated_at", &fliptsql.Timestamp{Timestamp: flipt.Now()}).
- Where(whereClause).
- ExecContext(ctx)
- if err != nil {
- return nil, err
- }
-
- count, err := res.RowsAffected()
- if err != nil {
- return nil, err
- }
-
- if count != 1 {
- return nil, errs.ErrNotFoundf("constraint %q", r.Id)
- }
-
- var (
- createdAt fliptsql.Timestamp
- updatedAt fliptsql.Timestamp
-
- c = &flipt.Constraint{}
- )
-
- if err := s.builder.Select("id, namespace_key, segment_key, type, property, operator, value, description, created_at, updated_at").
- From("constraints").
- Where(whereClause).
- QueryRowContext(ctx).
- Scan(&c.Id, &c.NamespaceKey, &c.SegmentKey, &c.Type, &c.Property, &c.Operator, &c.Value, &c.Description, &createdAt, &updatedAt); err != nil {
- return nil, err
- }
-
- c.CreatedAt = createdAt.Timestamp
- c.UpdatedAt = updatedAt.Timestamp
-
- return c, nil
-}
-
-// DeleteConstraint deletes a constraint
-func (s *Store) DeleteConstraint(ctx context.Context, r *flipt.DeleteConstraintRequest) (err error) {
- defer func() {
- if err == nil {
- err = s.setVersion(ctx, r.NamespaceKey)
- }
- }()
-
- if r.NamespaceKey == "" {
- r.NamespaceKey = storage.DefaultNamespace
- }
-
- _, err = s.builder.Delete("constraints").
- Where(sq.And{sq.Eq{"id": r.Id}, sq.Eq{"segment_key": r.SegmentKey}, sq.Eq{"namespace_key": r.NamespaceKey}}).
- ExecContext(ctx)
-
- return err
-}
diff --git a/internal/storage/sql/common/storage.go b/internal/storage/sql/common/storage.go
deleted file mode 100644
index f8c1f6ad09..0000000000
--- a/internal/storage/sql/common/storage.go
+++ /dev/null
@@ -1,69 +0,0 @@
-package common
-
-import (
- "context"
- "database/sql"
- "time"
-
- sq "github.com/Masterminds/squirrel"
- "go.flipt.io/flipt/internal/storage"
- fliptsql "go.flipt.io/flipt/internal/storage/sql"
- "go.uber.org/zap"
-)
-
-var _ storage.Store = &Store{}
-
-type Store struct {
- builder sq.StatementBuilderType
- db *sql.DB
- logger *zap.Logger
-}
-
-func NewStore(db *sql.DB, builder sq.StatementBuilderType, logger *zap.Logger) *Store {
- return &Store{
- db: db,
- builder: builder,
- logger: logger,
- }
-}
-
-type PageToken struct {
- Key string `json:"key,omitempty"`
- Offset uint64 `json:"offset,omitempty"`
-}
-
-func (s *Store) String() string {
- return ""
-}
-
-func (s *Store) GetVersion(ctx context.Context, ns storage.NamespaceRequest) (string, error) {
- var stateModifiedAt fliptsql.NullableTimestamp
-
- err := s.builder.
- Select("state_modified_at").
- From("namespaces").
- Where(sq.Eq{"\"key\"": ns.Namespace()}).
- Limit(1).
- RunWith(s.db).
- QueryRowContext(ctx).
- Scan(&stateModifiedAt)
-
- if err != nil {
- return "", err
- }
-
- if !stateModifiedAt.IsValid() {
- return "", nil
- }
-
- return stateModifiedAt.Timestamp.String(), nil
-}
-
-func (s *Store) setVersion(ctx context.Context, namespace string) error {
- _, err := s.builder.
- Update("namespaces").
- Set("state_modified_at", time.Now().UTC()).
- Where(sq.Eq{"\"key\"": namespace}).
- ExecContext(ctx)
- return err
-}
diff --git a/internal/storage/sql/common/util.go b/internal/storage/sql/common/util.go
deleted file mode 100644
index 4fcd954fd6..0000000000
--- a/internal/storage/sql/common/util.go
+++ /dev/null
@@ -1,56 +0,0 @@
-package common
-
-import (
- "encoding/base64"
- "encoding/json"
-
- "go.flipt.io/flipt/errors"
- "go.uber.org/zap"
-)
-
-// decodePageToken is a utility function which determines the `PageToken` based on
-// input.
-func decodePageToken(logger *zap.Logger, pageToken string) (PageToken, error) {
- var token PageToken
-
- tok, err := base64.StdEncoding.DecodeString(pageToken)
- if err != nil {
- logger.Warn("invalid page token provided", zap.Error(err))
- return token, errors.ErrInvalidf("pageToken is not valid: %q", pageToken)
- }
-
- if err := json.Unmarshal(tok, &token); err != nil {
- logger.Warn("invalid page token provided", zap.Error(err))
-
- return token, errors.ErrInvalidf("pageToken is not valid: %q", pageToken)
- }
-
- return token, nil
-}
-
-// removeDuplicates is an inner utility function that will deduplicate a slice of strings.
-func removeDuplicates(src []string) []string {
- allKeys := make(map[string]bool)
-
- dest := []string{}
-
- for _, item := range src {
- if _, value := allKeys[item]; !value {
- allKeys[item] = true
- dest = append(dest, item)
- }
- }
-
- return dest
-}
-
-// sanitizeSegmentKeys is a utility function that will transform segment keys into the right input.
-func sanitizeSegmentKeys(segmentKey string, segmentKeys []string) []string {
- if len(segmentKeys) > 0 {
- return removeDuplicates(segmentKeys)
- } else if segmentKey != "" {
- return []string{segmentKey}
- }
-
- return nil
-}
diff --git a/internal/storage/sql/db.go b/internal/storage/sql/db.go
index 5c51b906a2..0ba5ff1332 100644
--- a/internal/storage/sql/db.go
+++ b/internal/storage/sql/db.go
@@ -2,188 +2,20 @@ package sql
import (
"database/sql"
- "database/sql/driver"
"errors"
- "fmt"
- "io/fs"
- "net/url"
"github.com/ClickHouse/clickhouse-go/v2"
- sq "github.com/Masterminds/squirrel"
- "github.com/XSAM/otelsql"
- "github.com/go-sql-driver/mysql"
- "github.com/mattn/go-sqlite3"
- "github.com/tursodatabase/libsql-client-go/libsql"
- "github.com/xo/dburl"
"go.flipt.io/flipt/internal/config"
- "go.opentelemetry.io/otel/attribute"
- semconv "go.opentelemetry.io/otel/semconv/v1.26.0"
)
-func init() {
- // we do a bit of surgery in dburl to stop it from walking
- // up the provided file:/path to see if any parent directories
- // exist, else dburl assumes the postgres protocol.
- // see: https://github.com/xo/dburl/issues/35
- stat := dburl.Stat
- dburl.Stat = func(name string) (fs.FileInfo, error) {
- fi, err := stat(name)
- if err == nil {
- return fi, nil
- }
-
- if errors.Is(err, fs.ErrNotExist) {
- return fileInfo(name), nil
- }
-
- return nil, err
- }
-
- // register libsql driver with dburl
- dburl.Register(dburl.Scheme{
- Driver: "libsql",
- Generator: dburl.GenOpaque,
- Transport: 0,
- Opaque: true,
- Aliases: []string{"libsql", "http", "https"},
- Override: "file",
- })
- // drop references to lib/pq and relay on pgx
- dburl.Unregister("postgres")
- dburl.RegisterAlias("pgx", "postgres")
- dburl.RegisterAlias("pgx", "postgresql")
-}
-
-// Open opens a connection to the db
-func Open(cfg config.Config, opts ...Option) (*sql.DB, Driver, error) {
- var options Options
- for _, opt := range opts {
- opt(&options)
- }
-
- sql, driver, err := open(cfg, options)
- if err != nil {
- return nil, 0, err
- }
-
- err = otelsql.RegisterDBStatsMetrics(sql,
- otelsql.WithAttributes(
- attribute.Key("driver").String(driver.String()),
- ))
-
- return sql, driver, err
-}
-
-// BuilderFor returns a squirrel statement builder which decorates
-// the provided sql.DB configured for the provided driver.
-func BuilderFor(db *sql.DB, driver Driver, preparedStatementsEnabled bool) sq.StatementBuilderType {
- var brdb sq.BaseRunner = db
- if preparedStatementsEnabled {
- brdb = sq.NewStmtCacher(db)
- }
-
- builder := sq.StatementBuilder.RunWith(brdb)
- if driver == Postgres || driver == CockroachDB {
- builder = builder.PlaceholderFormat(sq.Dollar)
- }
-
- return builder
-}
-
-type Options struct {
- sslDisabled bool
- migrate bool
-}
-
-type Option func(*Options)
-
-func WithSSLDisabled(o *Options) {
- o.sslDisabled = true
-}
-
-func WithMigrate(o *Options) {
- o.migrate = true
-}
-
-func open(cfg config.Config, opts Options) (*sql.DB, Driver, error) {
- d, url, err := parse(cfg, opts)
- if err != nil {
- return nil, 0, err
- }
-
- driverName := fmt.Sprintf("instrumented-%s", d)
-
- var (
- dr driver.Driver
- attrs []attribute.KeyValue
- )
-
- switch d {
- case SQLite:
- dr = &sqlite3.SQLiteDriver{}
- attrs = []attribute.KeyValue{semconv.DBSystemSqlite}
- case LibSQL:
- dr = &libsql.Driver{}
- attrs = []attribute.KeyValue{semconv.DBSystemSqlite}
- case Postgres:
- dr = newAdaptedPostgresDriver(d)
- attrs = []attribute.KeyValue{semconv.DBSystemPostgreSQL}
- case CockroachDB:
- dr = newAdaptedPostgresDriver(d)
- attrs = []attribute.KeyValue{semconv.DBSystemCockroachdb}
- case MySQL:
- dr = &mysql.MySQLDriver{}
- attrs = []attribute.KeyValue{semconv.DBSystemMySQL}
- }
-
- registered := false
-
- for _, dd := range sql.Drivers() {
- if dd == driverName {
- registered = true
- break
- }
- }
-
- if !registered {
- sql.Register(driverName, otelsql.WrapDriver(dr, otelsql.WithAttributes(attrs...)))
- }
-
- db, err := sql.Open(driverName, url.DSN)
- if err != nil {
- return nil, 0, fmt.Errorf("opening db for driver: %s %w", d, err)
- }
-
- db.SetMaxIdleConns(cfg.Database.MaxIdleConn)
-
- var maxOpenConn int
- if cfg.Database.MaxOpenConn > 0 {
- maxOpenConn = cfg.Database.MaxOpenConn
- }
-
- // if we're using sqlite, we need to set always set the max open connections to 1
- // see: https://github.com/mattn/go-sqlite3/issues/274
- if d == SQLite || d == LibSQL {
- maxOpenConn = 1
- }
-
- db.SetMaxOpenConns(maxOpenConn)
-
- if cfg.Database.ConnMaxLifetime > 0 {
- db.SetConnMaxLifetime(cfg.Database.ConnMaxLifetime)
- }
-
- return db, d, nil
-}
-
// openAnalytics is a convenience function of providing a database.sql instance for
// an analytics database.
func openAnalytics(cfg config.Config) (*sql.DB, Driver, error) {
if cfg.Analytics.Storage.Clickhouse.Enabled {
clickhouseOptions, err := cfg.Analytics.Storage.Clickhouse.Options()
if err != nil {
- return nil, 0, err
+ return nil, "", err
}
db := clickhouse.OpenDB(clickhouseOptions)
@@ -191,129 +23,26 @@ func openAnalytics(cfg config.Config) (*sql.DB, Driver, error) {
return db, Clickhouse, nil
}
- return nil, 0, errors.New("no analytics db provided")
+ return nil, "", errors.New("no analytics db provided")
}
var (
- driverToString = map[Driver]string{
- SQLite: "sqlite3",
- LibSQL: "libsql",
- Postgres: "postgres",
- MySQL: "mysql",
- CockroachDB: "cockroachdb",
- Clickhouse: "clickhouse",
- }
-
stringToDriver = map[string]Driver{
- "sqlite3": SQLite,
- "libsql": LibSQL,
- "pgx": Postgres,
- "mysql": MySQL,
- "cockroachdb": CockroachDB,
- "clickhouse": Clickhouse,
+ "clickhouse": Clickhouse,
}
)
// Driver represents a database driver
-type Driver uint8
+type Driver string
func (d Driver) String() string {
- return driverToString[d]
+ return string(d)
}
func (d Driver) Migrations() string {
- if d == LibSQL {
- return "sqlite3"
- }
return d.String()
}
const (
- _ Driver = iota
- // SQLite ...
- SQLite
- // Postgres ...
- Postgres
- // MySQL ...
- MySQL
- // CockroachDB ...
- CockroachDB
- // LibSQL ...
- LibSQL
- // Clickhouse ...
- Clickhouse
+ Clickhouse Driver = "clickhouse"
)
-
-func parse(cfg config.Config, opts Options) (Driver, *dburl.URL, error) {
- u := cfg.Database.URL
-
- if u == "" {
- host := cfg.Database.Host
-
- if cfg.Database.Port > 0 {
- host = fmt.Sprintf("%s:%d", host, cfg.Database.Port)
- }
-
- uu := url.URL{
- Scheme: cfg.Database.Protocol.String(),
- Host: host,
- Path: cfg.Database.Name,
- }
-
- if cfg.Database.User != "" {
- if cfg.Database.Password != "" {
- uu.User = url.UserPassword(cfg.Database.User, cfg.Database.Password)
- } else {
- uu.User = url.User(cfg.Database.User)
- }
- }
-
- u = uu.String()
- }
-
- url, err := dburl.Parse(u)
- if err != nil {
- return 0, nil, fmt.Errorf("error parsing url: %w", err)
- }
-
- driver := stringToDriver[url.UnaliasedDriver]
- if driver == 0 {
- return 0, nil, fmt.Errorf("unknown database driver for: %q", url.Driver)
- }
-
- v := url.Query()
- switch driver {
- case Postgres, CockroachDB:
- if opts.sslDisabled {
- v.Set("sslmode", "disable")
- }
-
- if !cfg.Database.PreparedStatementsEnabled {
- v.Set("default_query_exec_mode", "simple_protocol")
- }
- case MySQL:
- v.Set("multiStatements", "true")
- v.Set("parseTime", "true")
- if !opts.migrate {
- v.Set("sql_mode", "ANSI")
- }
- case SQLite, LibSQL:
- if url.Scheme != "http" && url.Scheme != "https" {
- v.Set("cache", "shared")
- v.Set("mode", "rwc")
- v.Set("_fk", "true")
- }
- }
-
- url.RawQuery = v.Encode()
- // we need to re-parse since we modified the query params
- url, err = dburl.Parse(url.URL.String())
-
- if url.Scheme == "http" {
- url.DSN = "http://" + url.DSN
- } else if url.Scheme == "https" {
- url.DSN = "https://" + url.DSN
- }
-
- return driver, url, err
-}
diff --git a/internal/storage/sql/db_internal_test.go b/internal/storage/sql/db_internal_test.go
deleted file mode 100644
index 45fdf4ab63..0000000000
--- a/internal/storage/sql/db_internal_test.go
+++ /dev/null
@@ -1,345 +0,0 @@
-package sql
-
-import (
- "testing"
-
- "github.com/stretchr/testify/assert"
- "github.com/stretchr/testify/require"
- "go.flipt.io/flipt/internal/config"
-)
-
-func TestParse(t *testing.T) {
- tests := []struct {
- name string
- cfg config.DatabaseConfig
- dsn string
- driver Driver
- options []Option
- wantErr bool
- }{
- {
- name: "sqlite url",
- cfg: config.DatabaseConfig{
- URL: "file:flipt.db",
- },
- driver: SQLite,
- dsn: "flipt.db?_fk=true&cache=shared&mode=rwc",
- },
- {
- name: "sqlite",
- cfg: config.DatabaseConfig{
- Protocol: config.DatabaseSQLite,
- Host: "flipt.db",
- },
- driver: SQLite,
- dsn: "flipt.db?_fk=true&cache=shared&mode=rwc",
- },
- {
- name: "postgresql url",
- cfg: config.DatabaseConfig{
- URL: "postgresql://postgres@localhost:5432/flipt?sslmode=disable",
- },
- driver: Postgres,
- dsn: "postgres://postgres@localhost:5432/flipt?default_query_exec_mode=simple_protocol&sslmode=disable",
- },
- {
- name: "postgresql url prepared statements enabled",
- cfg: config.DatabaseConfig{
- URL: "postgresql://postgres@localhost:5432/flipt?sslmode=disable",
- PreparedStatementsEnabled: true,
- },
- driver: Postgres,
- dsn: "postgres://postgres@localhost:5432/flipt?sslmode=disable",
- },
- {
- name: "postgresql no disable sslmode",
- cfg: config.DatabaseConfig{
- URL: "postgresql://postgres@localhost:5432/flipt",
- },
- driver: Postgres,
- dsn: "postgres://postgres@localhost:5432/flipt?default_query_exec_mode=simple_protocol",
- },
- {
- name: "postgres url prepared statements enabled",
- cfg: config.DatabaseConfig{
- URL: "postgres://postgres@localhost:5432/flipt?sslmode=disable",
- PreparedStatementsEnabled: true,
- },
- driver: Postgres,
- dsn: "postgres://postgres@localhost:5432/flipt?sslmode=disable",
- },
- {
- name: "postgres url",
- cfg: config.DatabaseConfig{
- URL: "postgres://postgres@localhost:5432/flipt?sslmode=disable",
- },
- driver: Postgres,
- dsn: "postgres://postgres@localhost:5432/flipt?default_query_exec_mode=simple_protocol&sslmode=disable",
- },
- {
- name: "postgres no disable sslmode",
- cfg: config.DatabaseConfig{
- URL: "postgres://postgres@localhost:5432/flipt",
- },
- driver: Postgres,
- dsn: "postgres://postgres@localhost:5432/flipt?default_query_exec_mode=simple_protocol",
- },
- {
- name: "postgres disable sslmode via opts",
- cfg: config.DatabaseConfig{
- Protocol: config.DatabasePostgres,
- Name: "flipt",
- Host: "localhost",
- Port: 5432,
- User: "postgres",
- },
- options: []Option{WithSSLDisabled},
- driver: Postgres,
- dsn: "postgres://postgres@localhost:5432/flipt?default_query_exec_mode=simple_protocol&sslmode=disable",
- },
- {
- name: "postgres no port",
- cfg: config.DatabaseConfig{
- Protocol: config.DatabasePostgres,
- Name: "flipt",
- Host: "localhost",
- User: "postgres",
- },
- driver: Postgres,
- dsn: "postgres://postgres@localhost:5432/flipt?default_query_exec_mode=simple_protocol",
- },
- {
- name: "postgres no password",
- cfg: config.DatabaseConfig{
- Protocol: config.DatabasePostgres,
- Name: "flipt",
- Host: "localhost",
- Port: 5432,
- User: "postgres",
- },
- driver: Postgres,
- dsn: "postgres://postgres@localhost:5432/flipt?default_query_exec_mode=simple_protocol",
- },
- {
- name: "postgres with password",
- cfg: config.DatabaseConfig{
- Protocol: config.DatabasePostgres,
- Name: "flipt",
- Host: "localhost",
- Port: 5432,
- User: "postgres",
- Password: "foo",
- },
- driver: Postgres,
- dsn: "postgres://postgres:foo@localhost:5432/flipt?default_query_exec_mode=simple_protocol",
- },
- {
- name: "mysql url",
- cfg: config.DatabaseConfig{
- URL: "mysql://mysql@localhost:3306/flipt",
- },
- driver: MySQL,
- dsn: "mysql@tcp(localhost:3306)/flipt?multiStatements=true&parseTime=true&sql_mode=ANSI",
- },
- {
- name: "mysql no ANSI sql mode via opts",
- cfg: config.DatabaseConfig{
- Protocol: config.DatabaseMySQL,
- Name: "flipt",
- Host: "localhost",
- Port: 3306,
- User: "mysql",
- },
- options: []Option{
- WithMigrate,
- },
- driver: MySQL,
- dsn: "mysql@tcp(localhost:3306)/flipt?multiStatements=true&parseTime=true",
- },
- {
- name: "mysql no port",
- cfg: config.DatabaseConfig{
- Protocol: config.DatabaseMySQL,
- Name: "flipt",
- Host: "localhost",
- User: "mysql",
- Password: "foo",
- },
- driver: MySQL,
- dsn: "mysql:foo@tcp(localhost:3306)/flipt?multiStatements=true&parseTime=true&sql_mode=ANSI",
- },
- {
- name: "mysql no password",
- cfg: config.DatabaseConfig{
- Protocol: config.DatabaseMySQL,
- Name: "flipt",
- Host: "localhost",
- Port: 3306,
- User: "mysql",
- },
- driver: MySQL,
- dsn: "mysql@tcp(localhost:3306)/flipt?multiStatements=true&parseTime=true&sql_mode=ANSI",
- },
- {
- name: "mysql with password",
- cfg: config.DatabaseConfig{
- Protocol: config.DatabaseMySQL,
- Name: "flipt",
- Host: "localhost",
- Port: 3306,
- User: "mysql",
- Password: "foo",
- },
- driver: MySQL,
- dsn: "mysql:foo@tcp(localhost:3306)/flipt?multiStatements=true&parseTime=true&sql_mode=ANSI",
- },
- {
- name: "cockroachdb url prepared statements enabled",
- cfg: config.DatabaseConfig{
- URL: "cockroachdb://cockroachdb@localhost:26257/flipt?sslmode=disable",
- PreparedStatementsEnabled: true,
- },
- driver: CockroachDB,
- dsn: "postgres://cockroachdb@localhost:26257/flipt?sslmode=disable",
- },
- {
- name: "cockroachdb url",
- cfg: config.DatabaseConfig{
- URL: "cockroachdb://cockroachdb@localhost:26257/flipt?sslmode=disable",
- },
- driver: CockroachDB,
- dsn: "postgres://cockroachdb@localhost:26257/flipt?default_query_exec_mode=simple_protocol&sslmode=disable",
- },
- {
- name: "cockroachdb url alternative (cockroach://",
- cfg: config.DatabaseConfig{
- URL: "cockroach://cockroachdb@localhost:26257/flipt?sslmode=disable",
- },
- driver: CockroachDB,
- dsn: "postgres://cockroachdb@localhost:26257/flipt?default_query_exec_mode=simple_protocol&sslmode=disable",
- },
- {
- name: "cockroachdb url alternative (crdb://",
- cfg: config.DatabaseConfig{
- URL: "crdb://cockroachdb@localhost:26257/flipt?sslmode=disable",
- },
- driver: CockroachDB,
- dsn: "postgres://cockroachdb@localhost:26257/flipt?default_query_exec_mode=simple_protocol&sslmode=disable",
- },
- {
- name: "cockroachdb default disable sslmode",
- // cockroachdb defaults to sslmode=disable
- // https://www.cockroachlabs.com/docs/stable/connection-parameters.html#additional-connection-parameters
- cfg: config.DatabaseConfig{
- URL: "cockroachdb://cockroachdb@localhost:26257/flipt",
- },
- driver: CockroachDB,
- dsn: "postgres://cockroachdb@localhost:26257/flipt?default_query_exec_mode=simple_protocol&sslmode=disable",
- },
- {
- name: "cockroachdb disable sslmode via opts",
- cfg: config.DatabaseConfig{
- Protocol: config.DatabaseCockroachDB,
- Name: "flipt",
- Host: "localhost",
- Port: 26257,
- User: "cockroachdb",
- },
- options: []Option{
- WithSSLDisabled,
- },
- driver: CockroachDB,
- dsn: "postgres://cockroachdb@localhost:26257/flipt?default_query_exec_mode=simple_protocol&sslmode=disable",
- },
- {
- name: "cockroachdb no port",
- cfg: config.DatabaseConfig{
- Protocol: config.DatabaseCockroachDB,
- Name: "flipt",
- Host: "localhost",
- User: "cockroachdb",
- },
- driver: CockroachDB,
- dsn: "postgres://cockroachdb@localhost:26257/flipt?default_query_exec_mode=simple_protocol&sslmode=disable",
- },
- {
- name: "cockroachdb no password",
- cfg: config.DatabaseConfig{
- Protocol: config.DatabaseCockroachDB,
- Name: "flipt",
- Host: "localhost",
- Port: 26257,
- User: "cockroachdb",
- },
- driver: CockroachDB,
- dsn: "postgres://cockroachdb@localhost:26257/flipt?default_query_exec_mode=simple_protocol&sslmode=disable",
- },
- {
- name: "cockroachdb with password",
- cfg: config.DatabaseConfig{
- Protocol: config.DatabaseCockroachDB,
- Name: "flipt",
- Host: "localhost",
- Port: 26257,
- User: "cockroachdb",
- Password: "foo",
- },
- driver: CockroachDB,
- dsn: "postgres://cockroachdb:foo@localhost:26257/flipt?default_query_exec_mode=simple_protocol&sslmode=disable",
- },
- {
- name: "invalid url",
- cfg: config.DatabaseConfig{
- URL: "http://a b",
- },
- wantErr: true,
- },
- {
- name: "unknown driver",
- cfg: config.DatabaseConfig{
- URL: "mongo://127.0.0.1",
- },
- wantErr: true,
- },
- {
- name: "postgres multi hosts url",
- cfg: config.DatabaseConfig{
- URL: "postgres://user:pass@host1:5432,host2:2345/flipt?application_name=flipt&target_session_attrs=primary",
- PreparedStatementsEnabled: false,
- },
- driver: Postgres,
- dsn: "postgres://user:pass@host1:5432,host2:2345/flipt?application_name=flipt&default_query_exec_mode=simple_protocol&target_session_attrs=primary",
- },
- }
-
- for _, tt := range tests {
- tt := tt
-
- var (
- cfg = tt.cfg
- driver = tt.driver
- url = tt.dsn
- wantErr = tt.wantErr
- opts Options
- )
-
- for _, opt := range tt.options {
- opt(&opts)
- }
-
- t.Run(tt.name, func(t *testing.T) {
- d, u, err := parse(config.Config{
- Database: cfg,
- }, opts)
-
- if wantErr {
- require.Error(t, err)
- return
- }
-
- require.NoError(t, err)
- assert.Equal(t, driver, d)
- assert.Equal(t, url, u.DSN)
- })
- }
-}
diff --git a/internal/storage/sql/db_test.go b/internal/storage/sql/db_test.go
deleted file mode 100644
index e409a127b6..0000000000
--- a/internal/storage/sql/db_test.go
+++ /dev/null
@@ -1,218 +0,0 @@
-//nolint:gosec
-package sql_test
-
-import (
- "context"
- "fmt"
- "math/rand"
- "os"
- "testing"
- "time"
-
- sq "github.com/Masterminds/squirrel"
- "github.com/stretchr/testify/assert"
- "github.com/stretchr/testify/require"
- "github.com/stretchr/testify/suite"
- "go.flipt.io/flipt/internal/config"
- "go.flipt.io/flipt/internal/storage"
- fliptsql "go.flipt.io/flipt/internal/storage/sql"
- "go.flipt.io/flipt/internal/storage/sql/mysql"
- "go.flipt.io/flipt/internal/storage/sql/postgres"
- "go.flipt.io/flipt/internal/storage/sql/sqlite"
- fliptsqltesting "go.flipt.io/flipt/internal/storage/sql/testing"
- "go.flipt.io/flipt/rpc/flipt"
- "go.uber.org/zap/zaptest"
-
- _ "github.com/golang-migrate/migrate/v4/source/file"
-)
-
-func TestOpen(t *testing.T) {
- tests := []struct {
- name string
- cfg config.DatabaseConfig
- driver fliptsql.Driver
- wantErr bool
- }{
- {
- name: "sqlite url",
- cfg: config.DatabaseConfig{
- URL: "file:/flipt.db",
- MaxOpenConn: 5,
- ConnMaxLifetime: 30 * time.Minute,
- },
- driver: fliptsql.SQLite,
- },
- {
- name: "sqlite url (without slash)",
- cfg: config.DatabaseConfig{
- URL: "file:flipt.db",
- MaxOpenConn: 5,
- ConnMaxLifetime: 30 * time.Minute,
- },
- driver: fliptsql.SQLite,
- },
- {
- name: "libsql url",
- cfg: config.DatabaseConfig{
- URL: "libsql://file:/flipt.db",
- MaxOpenConn: 5,
- ConnMaxLifetime: 30 * time.Minute,
- },
- driver: fliptsql.LibSQL,
- },
- {
- name: "libsql with http",
- cfg: config.DatabaseConfig{
- URL: "http://127.0.0.1:8000",
- MaxOpenConn: 5,
- ConnMaxLifetime: 30 * time.Minute,
- },
- driver: fliptsql.LibSQL,
- },
- {
- name: "libsql with https",
- cfg: config.DatabaseConfig{
- URL: "https://turso.remote",
- MaxOpenConn: 5,
- ConnMaxLifetime: 30 * time.Minute,
- },
- driver: fliptsql.LibSQL,
- },
- {
- name: "postgres url",
- cfg: config.DatabaseConfig{
- URL: "postgres://postgres@localhost:5432/flipt?sslmode=disable",
- },
- driver: fliptsql.Postgres,
- },
- {
- name: "mysql url",
- cfg: config.DatabaseConfig{
- URL: "mysql://mysql@localhost:3306/flipt",
- },
- driver: fliptsql.MySQL,
- },
- {
- name: "cockroachdb url",
- cfg: config.DatabaseConfig{
- URL: "cockroachdb://cockroachdb@localhost:26257/flipt?sslmode=disable",
- },
- driver: fliptsql.CockroachDB,
- },
- {
- name: "invalid url",
- cfg: config.DatabaseConfig{
- URL: "tcp://a b",
- },
- wantErr: true,
- },
- {
- name: "unknown driver",
- cfg: config.DatabaseConfig{
- URL: "mongo://127.0.0.1",
- },
- wantErr: true,
- },
- }
-
- for _, tt := range tests {
- var (
- cfg = tt.cfg
- driver = tt.driver
- wantErr = tt.wantErr
- )
-
- t.Run(tt.name, func(t *testing.T) {
- db, d, err := fliptsql.Open(config.Config{
- Database: cfg,
- })
-
- if wantErr {
- require.Error(t, err)
- return
- }
-
- require.NoError(t, err)
- require.NotNil(t, db)
-
- defer db.Close()
-
- assert.Equal(t, driver, d)
- })
- }
-}
-
-func TestDBTestSuite(t *testing.T) {
- suite.Run(t, new(DBTestSuite))
-}
-
-type DBTestSuite struct {
- suite.Suite
- db *fliptsqltesting.Database
- store storage.Store
- namespace string
-}
-
-func TestMain(m *testing.M) {
- os.Exit(m.Run())
-}
-
-func (s *DBTestSuite) SetupSuite() {
- setup := func() error {
- logger := zaptest.NewLogger(s.T())
-
- db, err := fliptsqltesting.Open()
- if err != nil {
- return err
- }
-
- s.db = db
-
- builder := sq.StatementBuilder.RunWith(sq.NewStmtCacher(db.DB))
-
- var store storage.Store
-
- switch db.Driver {
- case fliptsql.SQLite, fliptsql.LibSQL:
- store = sqlite.NewStore(db.DB, builder, logger)
- case fliptsql.Postgres, fliptsql.CockroachDB:
- store = postgres.NewStore(db.DB, builder, logger)
- case fliptsql.MySQL:
- store = mysql.NewStore(db.DB, builder, logger)
- }
-
- namespace := randomString(6)
-
- if _, err := store.CreateNamespace(context.Background(), &flipt.CreateNamespaceRequest{
- Key: namespace,
- }); err != nil {
- return fmt.Errorf("failed to create namespace: %w", err)
- }
-
- s.namespace = namespace
- s.store = store
- return nil
- }
-
- s.Require().NoError(setup())
-}
-
-func randomString(n int) string {
- var letters = []rune("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ")
-
- b := make([]rune, n)
- for i := range b {
- b[i] = letters[rand.Intn(len(letters))]
- }
-
- return string(b)
-}
-
-func (s *DBTestSuite) TearDownSuite() {
- shutdownCtx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
- defer cancel()
-
- if s.db != nil {
- s.db.Shutdown(shutdownCtx)
- }
-}
diff --git a/internal/storage/sql/errors.go b/internal/storage/sql/errors.go
deleted file mode 100644
index 049af97af2..0000000000
--- a/internal/storage/sql/errors.go
+++ /dev/null
@@ -1,109 +0,0 @@
-package sql
-
-import (
- "database/sql"
- "errors"
-
- "github.com/go-sql-driver/mysql"
- "github.com/jackc/pgx/v5/pgconn"
- "github.com/mattn/go-sqlite3"
- errs "go.flipt.io/flipt/errors"
-)
-
-var (
- errNotFound = errs.ErrNotFound("resource")
- errConstraintViolated = errs.ErrInvalid("contraint violated")
- errNotUnique = errs.ErrInvalid("not unique")
- errForeignKeyNotFound = errs.ErrNotFound("associated resource not found")
- errCanceled = errs.ErrCanceled("query canceled")
- errConnectionFailed = errs.ErrCanceled("failed to connect to database")
-)
-
-// AdaptError converts specific known-driver errors into wrapped storage errors.
-func (d Driver) AdaptError(err error) error {
- if err == nil {
- return nil
- }
-
- if errors.Is(err, sql.ErrNoRows) {
- return errNotFound
- }
-
- switch d {
- case SQLite, LibSQL:
- return adaptSQLiteError(err)
- case CockroachDB, Postgres:
- return adaptPostgresError(err)
- case MySQL:
- return adaptMySQLError(err)
- }
-
- return err
-}
-
-func adaptSQLiteError(err error) error {
- var serr sqlite3.Error
-
- if errors.As(err, &serr) {
- if serr.Code == sqlite3.ErrConstraint {
- switch serr.ExtendedCode {
- case sqlite3.ErrConstraintForeignKey:
- return errForeignKeyNotFound
- case sqlite3.ErrConstraintUnique:
- return errNotUnique
- }
-
- return errConstraintViolated
- }
- }
-
- return err
-}
-
-func adaptPostgresError(err error) error {
- const (
- constraintForeignKeyErr = "23503" // "foreign_key_violation"
- constraintUniqueErr = "23505" // "unique_violation"
- queryCanceled = "57014" // "query_canceled"
- )
-
- var perr *pgconn.PgError
-
- if errors.As(err, &perr) {
- switch perr.Code {
- case constraintUniqueErr:
- return errNotUnique
- case constraintForeignKeyErr:
- return errForeignKeyNotFound
- case queryCanceled:
- return errCanceled
- }
- }
-
- var cerr *pgconn.ConnectError
- if errors.As(err, &cerr) {
- return errConnectionFailed
- }
-
- return err
-}
-
-func adaptMySQLError(err error) error {
- const (
- constraintForeignKeyErrCode uint16 = 1452
- constraintUniqueErrCode uint16 = 1062
- )
-
- var merr *mysql.MySQLError
-
- if errors.As(err, &merr) {
- switch merr.Number {
- case constraintForeignKeyErrCode:
- return errForeignKeyNotFound
- case constraintUniqueErrCode:
- return errNotUnique
- }
- }
-
- return err
-}
diff --git a/internal/storage/sql/errors_test.go b/internal/storage/sql/errors_test.go
deleted file mode 100644
index d4732f45bb..0000000000
--- a/internal/storage/sql/errors_test.go
+++ /dev/null
@@ -1,172 +0,0 @@
-package sql
-
-import (
- "database/sql"
- "fmt"
- "testing"
-
- "github.com/go-sql-driver/mysql"
- "github.com/jackc/pgx/v5/pgconn"
- "github.com/mattn/go-sqlite3"
- "github.com/stretchr/testify/require"
- "go.flipt.io/flipt/errors"
-)
-
-func errPtr[E error](e E) *E {
- return &e
-}
-
-func Test_AdaptError(t *testing.T) {
- for _, test := range []struct {
- driver Driver
- inputErr error
- // if outputErrAs nil then test will ensure input is returned
- outputErrAs any
- }{
- // No driver
- {},
- // All drivers
- {
- driver: SQLite,
- inputErr: sql.ErrNoRows,
- outputErrAs: errPtr(errors.ErrNotFound("")),
- },
- {
- driver: SQLite,
- inputErr: fmt.Errorf("wrapped no rows: %w", sql.ErrNoRows),
- outputErrAs: errPtr(errors.ErrNotFound("")),
- },
- {
- driver: Postgres,
- inputErr: sql.ErrNoRows,
- outputErrAs: errPtr(errors.ErrNotFound("")),
- },
- {
- driver: Postgres,
- inputErr: fmt.Errorf("wrapped no rows: %w", sql.ErrNoRows),
- outputErrAs: errPtr(errors.ErrNotFound("")),
- },
- {
- driver: MySQL,
- inputErr: sql.ErrNoRows,
- outputErrAs: errPtr(errors.ErrNotFound("")),
- },
- {
- driver: MySQL,
- inputErr: fmt.Errorf("wrapped no rows: %w", sql.ErrNoRows),
- outputErrAs: errPtr(errors.ErrNotFound("")),
- },
- // SQLite
- // Unchanged errors
- {driver: SQLite},
- {
- driver: SQLite,
- inputErr: sqlite3.Error{},
- },
- {
- driver: SQLite,
- inputErr: sqlite3.Error{Code: sqlite3.ErrTooBig},
- },
- // Adjusted errors
- {
- driver: SQLite,
- inputErr: sqlite3.Error{
- Code: sqlite3.ErrConstraint,
- ExtendedCode: sqlite3.ErrConstraintCheck,
- },
- outputErrAs: errPtr(errors.ErrInvalid("")),
- },
- {
- driver: SQLite,
- inputErr: sqlite3.Error{
- Code: sqlite3.ErrConstraint,
- ExtendedCode: sqlite3.ErrConstraintForeignKey,
- },
- outputErrAs: errPtr(errors.ErrNotFound("")),
- },
- {
- driver: SQLite,
- inputErr: sqlite3.Error{
- Code: sqlite3.ErrConstraint,
- ExtendedCode: sqlite3.ErrConstraintUnique,
- },
- outputErrAs: errPtr(errors.ErrInvalid("")),
- },
- // Postgres
- // Unchanged errors
- {driver: Postgres},
- {
- driver: Postgres,
- inputErr: &pgconn.PgError{},
- },
- {
- driver: Postgres,
- inputErr: &pgconn.PgError{Code: "01000"},
- },
- // Adjusted errors
- {
- driver: Postgres,
- // foreign_key_violation
- inputErr: &pgconn.PgError{Code: "23503"},
- outputErrAs: errPtr(errors.ErrNotFound("")),
- },
- {
- driver: Postgres,
- // unique_violation
- inputErr: &pgconn.PgError{Code: "23505"},
- outputErrAs: errPtr(errors.ErrInvalid("")),
- },
- {
- driver: Postgres,
- // connection error
- inputErr: &pgconn.ConnectError{},
- outputErrAs: errPtr(errConnectionFailed),
- },
- // MySQL
- // Unchanged errors
- {driver: MySQL},
- {
- driver: MySQL,
- inputErr: &mysql.MySQLError{},
- },
- {
- driver: MySQL,
- inputErr: &mysql.MySQLError{Number: uint16(1000)},
- },
- // Adjusted errors
- {
- driver: MySQL,
- // foreign_key_violation
- inputErr: &mysql.MySQLError{Number: uint16(1452)},
- outputErrAs: errPtr(errors.ErrNotFound("")),
- },
- {
- driver: MySQL,
- // unique_violation
- inputErr: &mysql.MySQLError{Number: uint16(1062)},
- outputErrAs: errPtr(errors.ErrInvalid("")),
- },
- } {
- test := test
-
- outputs := test.outputErrAs
- if outputs == nil {
- outputs = test.inputErr
- }
-
- name := fmt.Sprintf("(%v).AdaptError(%v) == %T", test.driver, test.inputErr, outputs)
-
- t.Run(name, func(t *testing.T) {
- err := test.driver.AdaptError(test.inputErr)
- if test.outputErrAs == nil {
- // given the output expectation is nil we ensure the input error
- // is returned unchanged
- require.Equal(t, test.inputErr, err, "input error was changed unexpectedly")
- return
- }
-
- // otherwise, we ensure returned error matches via errors.Is
- require.ErrorAs(t, err, test.outputErrAs)
- })
- }
-}
diff --git a/internal/storage/sql/evaluation_test.go b/internal/storage/sql/evaluation_test.go
deleted file mode 100644
index 7da4ce8d6a..0000000000
--- a/internal/storage/sql/evaluation_test.go
+++ /dev/null
@@ -1,956 +0,0 @@
-package sql_test
-
-import (
- "context"
- "fmt"
- "math/rand"
- "os"
- "testing"
- "time"
-
- "github.com/google/uuid"
- "github.com/stretchr/testify/assert"
- "github.com/stretchr/testify/require"
- "go.flipt.io/flipt/internal/ext"
- "go.flipt.io/flipt/internal/server"
- "go.flipt.io/flipt/internal/server/evaluation"
- "go.flipt.io/flipt/internal/storage"
- flipt "go.flipt.io/flipt/rpc/flipt"
- rpcevaluation "go.flipt.io/flipt/rpc/flipt/evaluation"
- "go.uber.org/zap"
-)
-
-func (s *DBTestSuite) TestGetEvaluationRules() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
-
- segment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- MatchType: flipt.MatchType_ANY_MATCH_TYPE,
- })
-
- require.NoError(t, err)
-
- // constraint 1
- _, err = s.store.CreateConstraint(context.TODO(), &flipt.CreateConstraintRequest{
- SegmentKey: segment.Key,
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "foo",
- Operator: "EQ",
- Value: "bar",
- })
-
- require.NoError(t, err)
-
- // constraint 2
- _, err = s.store.CreateConstraint(context.TODO(), &flipt.CreateConstraintRequest{
- SegmentKey: segment.Key,
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "foz",
- Operator: "EQ",
- Value: "baz",
- })
-
- require.NoError(t, err)
-
- // rule rank 1
- rule1, err := s.store.CreateRule(context.TODO(), &flipt.CreateRuleRequest{
- FlagKey: flag.Key,
- SegmentKeys: []string{segment.Key},
- Rank: 1,
- })
-
- require.NoError(t, err)
-
- // rule rank 2
- rule2, err := s.store.CreateRule(context.TODO(), &flipt.CreateRuleRequest{
- FlagKey: flag.Key,
- SegmentKeys: []string{segment.Key},
- Rank: 2,
- })
-
- require.NoError(t, err)
-
- evaluationRules, err := s.store.GetEvaluationRules(context.TODO(), storage.NewResource(storage.DefaultNamespace, flag.Key))
- require.NoError(t, err)
-
- assert.NotEmpty(t, evaluationRules)
- assert.Len(t, evaluationRules, 2)
-
- assert.Equal(t, rule1.Id, evaluationRules[0].ID)
- assert.Equal(t, storage.DefaultNamespace, evaluationRules[0].NamespaceKey)
- assert.Equal(t, rule1.FlagKey, evaluationRules[0].FlagKey)
-
- assert.Equal(t, rule1.SegmentKey, evaluationRules[0].Segments[segment.Key].SegmentKey)
- assert.Equal(t, segment.MatchType, evaluationRules[0].Segments[segment.Key].MatchType)
- assert.Equal(t, rule1.Rank, evaluationRules[0].Rank)
- assert.Len(t, evaluationRules[0].Segments[segment.Key].Constraints, 2)
-
- assert.Equal(t, rule2.Id, evaluationRules[1].ID)
- assert.Equal(t, storage.DefaultNamespace, evaluationRules[1].NamespaceKey)
- assert.Equal(t, rule2.FlagKey, evaluationRules[1].FlagKey)
-
- assert.Equal(t, rule2.SegmentKey, evaluationRules[1].Segments[segment.Key].SegmentKey)
- assert.Equal(t, segment.MatchType, evaluationRules[1].Segments[segment.Key].MatchType)
- assert.Equal(t, rule2.Rank, evaluationRules[1].Rank)
- assert.Len(t, evaluationRules[1].Segments[segment.Key].Constraints, 2)
-}
-
-func (s *DBTestSuite) TestGetEvaluationRules_NoNamespace() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
-
- segment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- MatchType: flipt.MatchType_ANY_MATCH_TYPE,
- })
-
- require.NoError(t, err)
-
- // constraint 1
- _, err = s.store.CreateConstraint(context.TODO(), &flipt.CreateConstraintRequest{
- SegmentKey: segment.Key,
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "foo",
- Operator: "EQ",
- Value: "bar",
- })
-
- require.NoError(t, err)
-
- // constraint 2
- _, err = s.store.CreateConstraint(context.TODO(), &flipt.CreateConstraintRequest{
- SegmentKey: segment.Key,
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "foz",
- Operator: "EQ",
- Value: "baz",
- })
-
- require.NoError(t, err)
-
- // rule rank 1
- rule1, err := s.store.CreateRule(context.TODO(), &flipt.CreateRuleRequest{
- FlagKey: flag.Key,
- SegmentKeys: []string{segment.Key},
- Rank: 1,
- })
-
- require.NoError(t, err)
-
- // rule rank 2
- rule2, err := s.store.CreateRule(context.TODO(), &flipt.CreateRuleRequest{
- FlagKey: flag.Key,
- SegmentKeys: []string{segment.Key},
- Rank: 2,
- })
-
- require.NoError(t, err)
-
- evaluationRules, err := s.store.GetEvaluationRules(context.TODO(), storage.NewResource("", flag.Key))
- require.NoError(t, err)
-
- assert.NotEmpty(t, evaluationRules)
- assert.Len(t, evaluationRules, 2)
-
- assert.Equal(t, rule1.Id, evaluationRules[0].ID)
- assert.Equal(t, storage.DefaultNamespace, evaluationRules[0].NamespaceKey)
- assert.Equal(t, rule1.FlagKey, evaluationRules[0].FlagKey)
-
- assert.Equal(t, rule1.SegmentKey, evaluationRules[0].Segments[segment.Key].SegmentKey)
- assert.Equal(t, segment.MatchType, evaluationRules[0].Segments[segment.Key].MatchType)
- assert.Equal(t, rule1.Rank, evaluationRules[0].Rank)
- assert.Len(t, evaluationRules[0].Segments[segment.Key].Constraints, 2)
-
- assert.Equal(t, rule2.Id, evaluationRules[1].ID)
- assert.Equal(t, storage.DefaultNamespace, evaluationRules[1].NamespaceKey)
- assert.Equal(t, rule2.FlagKey, evaluationRules[1].FlagKey)
-
- assert.Equal(t, rule2.SegmentKey, evaluationRules[1].Segments[segment.Key].SegmentKey)
- assert.Equal(t, segment.MatchType, evaluationRules[1].Segments[segment.Key].MatchType)
- assert.Equal(t, rule2.Rank, evaluationRules[1].Rank)
- assert.Len(t, evaluationRules[1].Segments[segment.Key].Constraints, 2)
-}
-
-func (s *DBTestSuite) TestGetEvaluationRulesNamespace() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
-
- firstSegment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- MatchType: flipt.MatchType_ANY_MATCH_TYPE,
- })
-
- require.NoError(t, err)
-
- secondSegment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- NamespaceKey: s.namespace,
- Key: "another_segment",
- Name: "foo",
- Description: "bar",
- MatchType: flipt.MatchType_ANY_MATCH_TYPE,
- })
-
- require.NoError(t, err)
-
- // constraint 1
- _, err = s.store.CreateConstraint(context.TODO(), &flipt.CreateConstraintRequest{
- NamespaceKey: s.namespace,
- SegmentKey: firstSegment.Key,
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "foo",
- Operator: "EQ",
- Value: "bar",
- })
-
- require.NoError(t, err)
-
- // constraint 2
- _, err = s.store.CreateConstraint(context.TODO(), &flipt.CreateConstraintRequest{
- NamespaceKey: s.namespace,
- SegmentKey: firstSegment.Key,
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "foz",
- Operator: "EQ",
- Value: "baz",
- })
-
- require.NoError(t, err)
-
- // rule rank 1
- rule1, err := s.store.CreateRule(context.TODO(), &flipt.CreateRuleRequest{
- NamespaceKey: s.namespace,
- FlagKey: flag.Key,
- SegmentOperator: flipt.SegmentOperator_AND_SEGMENT_OPERATOR,
- SegmentKeys: []string{firstSegment.Key, secondSegment.Key},
- Rank: 1,
- })
-
- require.NoError(t, err)
-
- evaluationRules, err := s.store.GetEvaluationRules(context.TODO(), storage.NewResource(s.namespace, flag.Key))
- require.NoError(t, err)
-
- assert.NotEmpty(t, evaluationRules)
- assert.Len(t, evaluationRules, 1)
-
- assert.Equal(t, rule1.Id, evaluationRules[0].ID)
- assert.Equal(t, s.namespace, evaluationRules[0].NamespaceKey)
- assert.Equal(t, rule1.FlagKey, evaluationRules[0].FlagKey)
-
- assert.Equal(t, firstSegment.Key, evaluationRules[0].Segments[firstSegment.Key].SegmentKey)
- assert.Equal(t, firstSegment.MatchType, evaluationRules[0].Segments[firstSegment.Key].MatchType)
- assert.Equal(t, rule1.Rank, evaluationRules[0].Rank)
- assert.Len(t, evaluationRules[0].Segments[firstSegment.Key].Constraints, 2)
-
- assert.Equal(t, secondSegment.Key, evaluationRules[0].Segments[secondSegment.Key].SegmentKey)
- assert.Equal(t, secondSegment.MatchType, evaluationRules[0].Segments[secondSegment.Key].MatchType)
- assert.Equal(t, rule1.Rank, evaluationRules[0].Rank)
- assert.Empty(t, evaluationRules[0].Segments[secondSegment.Key].Constraints)
-}
-
-func (s *DBTestSuite) TestGetEvaluationDistributions() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
-
- // variant 1
- variant1, err := s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- FlagKey: flag.Key,
- Key: "foo",
- })
-
- require.NoError(t, err)
-
- // variant 2
- variant2, err := s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- FlagKey: flag.Key,
- Key: "bar",
- Attachment: `{"key2": "value2"}`,
- })
-
- require.NoError(t, err)
-
- segment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- MatchType: flipt.MatchType_ANY_MATCH_TYPE,
- })
-
- require.NoError(t, err)
-
- _, err = s.store.CreateConstraint(context.TODO(), &flipt.CreateConstraintRequest{
- SegmentKey: segment.Key,
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "foo",
- Operator: "EQ",
- Value: "bar",
- })
-
- require.NoError(t, err)
-
- rule, err := s.store.CreateRule(context.TODO(), &flipt.CreateRuleRequest{
- FlagKey: flag.Key,
- SegmentKey: segment.Key,
- Rank: 1,
- })
-
- require.NoError(t, err)
-
- // 50/50 distribution
- _, err = s.store.CreateDistribution(context.TODO(), &flipt.CreateDistributionRequest{
- FlagKey: flag.Key,
- RuleId: rule.Id,
- VariantId: variant1.Id,
- Rollout: 50.00,
- })
-
- // required for MySQL since it only s.stores timestamps to the second and not millisecond granularity
- time.Sleep(1 * time.Second)
-
- require.NoError(t, err)
-
- _, err = s.store.CreateDistribution(context.TODO(), &flipt.CreateDistributionRequest{
- FlagKey: flag.Key,
- RuleId: rule.Id,
- VariantId: variant2.Id,
- Rollout: 50.00,
- })
-
- require.NoError(t, err)
-
- evaluationDistributions, err := s.store.GetEvaluationDistributions(context.TODO(), storage.NewResource(flag.NamespaceKey, flag.Key), storage.NewID(rule.Id))
- require.NoError(t, err)
-
- assert.Len(t, evaluationDistributions, 2)
-
- assert.NotEmpty(t, evaluationDistributions[0].ID)
- assert.Equal(t, rule.Id, evaluationDistributions[0].RuleID)
- assert.Equal(t, variant1.Id, evaluationDistributions[0].VariantID)
- assert.Equal(t, variant1.Key, evaluationDistributions[0].VariantKey)
- assert.InDelta(t, 50.00, evaluationDistributions[0].Rollout, 0)
-
- assert.NotEmpty(t, evaluationDistributions[1].ID)
- assert.Equal(t, rule.Id, evaluationDistributions[1].RuleID)
- assert.Equal(t, variant2.Id, evaluationDistributions[1].VariantID)
- assert.Equal(t, variant2.Key, evaluationDistributions[1].VariantKey)
- assert.Equal(t, `{"key2":"value2"}`, evaluationDistributions[1].VariantAttachment)
- assert.InDelta(t, 50.00, evaluationDistributions[1].Rollout, 0)
-}
-
-func (s *DBTestSuite) TestGetEvaluationDistributionsNamespace() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
-
- // variant 1
- variant1, err := s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- NamespaceKey: s.namespace,
- FlagKey: flag.Key,
- Key: "foo",
- })
-
- require.NoError(t, err)
-
- // variant 2
- variant2, err := s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- NamespaceKey: s.namespace,
- FlagKey: flag.Key,
- Key: "bar",
- Attachment: `{"key2": "value2"}`,
- })
-
- require.NoError(t, err)
-
- segment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- MatchType: flipt.MatchType_ANY_MATCH_TYPE,
- })
-
- require.NoError(t, err)
-
- _, err = s.store.CreateConstraint(context.TODO(), &flipt.CreateConstraintRequest{
- NamespaceKey: s.namespace,
- SegmentKey: segment.Key,
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "foo",
- Operator: "EQ",
- Value: "bar",
- })
-
- require.NoError(t, err)
-
- rule, err := s.store.CreateRule(context.TODO(), &flipt.CreateRuleRequest{
- NamespaceKey: s.namespace,
- FlagKey: flag.Key,
- SegmentKey: segment.Key,
- Rank: 1,
- })
-
- require.NoError(t, err)
-
- // 50/50 distribution
- _, err = s.store.CreateDistribution(context.TODO(), &flipt.CreateDistributionRequest{
- NamespaceKey: s.namespace,
- FlagKey: flag.Key,
- RuleId: rule.Id,
- VariantId: variant1.Id,
- Rollout: 50.00,
- })
-
- // required for MySQL since it only s.stores timestamps to the second and not millisecond granularity
- time.Sleep(1 * time.Second)
-
- require.NoError(t, err)
-
- _, err = s.store.CreateDistribution(context.TODO(), &flipt.CreateDistributionRequest{
- NamespaceKey: s.namespace,
- FlagKey: flag.Key,
- RuleId: rule.Id,
- VariantId: variant2.Id,
- Rollout: 50.00,
- })
-
- require.NoError(t, err)
-
- evaluationDistributions, err := s.store.GetEvaluationDistributions(context.TODO(), storage.NewResource(flag.NamespaceKey, flag.Key), storage.NewID(rule.Id))
- require.NoError(t, err)
-
- assert.Len(t, evaluationDistributions, 2)
-
- assert.NotEmpty(t, evaluationDistributions[0].ID)
- assert.Equal(t, rule.Id, evaluationDistributions[0].RuleID)
- assert.Equal(t, variant1.Id, evaluationDistributions[0].VariantID)
- assert.Equal(t, variant1.Key, evaluationDistributions[0].VariantKey)
- assert.InDelta(t, 50.00, evaluationDistributions[0].Rollout, 0)
-
- assert.NotEmpty(t, evaluationDistributions[1].ID)
- assert.Equal(t, rule.Id, evaluationDistributions[1].RuleID)
- assert.Equal(t, variant2.Id, evaluationDistributions[1].VariantID)
- assert.Equal(t, variant2.Key, evaluationDistributions[1].VariantKey)
- assert.Equal(t, `{"key2":"value2"}`, evaluationDistributions[1].VariantAttachment)
- assert.InDelta(t, 50.00, evaluationDistributions[1].Rollout, 0)
-}
-
-// https://github.com/flipt-io/flipt/issues/229
-func (s *DBTestSuite) TestGetEvaluationDistributions_MaintainOrder() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
-
- // variant 1
- variant1, err := s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- FlagKey: flag.Key,
- Key: "foo",
- })
-
- require.NoError(t, err)
-
- // variant 2
- variant2, err := s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- FlagKey: flag.Key,
- Key: "bar",
- })
-
- require.NoError(t, err)
-
- segment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- MatchType: flipt.MatchType_ANY_MATCH_TYPE,
- })
-
- require.NoError(t, err)
-
- _, err = s.store.CreateConstraint(context.TODO(), &flipt.CreateConstraintRequest{
- SegmentKey: segment.Key,
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "foo",
- Operator: "EQ",
- Value: "bar",
- })
-
- require.NoError(t, err)
-
- rule, err := s.store.CreateRule(context.TODO(), &flipt.CreateRuleRequest{
- FlagKey: flag.Key,
- SegmentKey: segment.Key,
- Rank: 1,
- })
-
- require.NoError(t, err)
-
- // 80/20 distribution
- dist1, err := s.store.CreateDistribution(context.TODO(), &flipt.CreateDistributionRequest{
- FlagKey: flag.Key,
- RuleId: rule.Id,
- VariantId: variant1.Id,
- Rollout: 80.00,
- })
-
- require.NoError(t, err)
-
- // required for MySQL since it only s.stores timestamps to the second and not millisecond granularity
- time.Sleep(1 * time.Second)
-
- dist2, err := s.store.CreateDistribution(context.TODO(), &flipt.CreateDistributionRequest{
- FlagKey: flag.Key,
- RuleId: rule.Id,
- VariantId: variant2.Id,
- Rollout: 20.00,
- })
-
- require.NoError(t, err)
-
- evaluationDistributions, err := s.store.GetEvaluationDistributions(context.TODO(), storage.NewResource(flag.NamespaceKey, flag.Key), storage.NewID(rule.Id))
- require.NoError(t, err)
-
- assert.Len(t, evaluationDistributions, 2)
-
- assert.NotEmpty(t, evaluationDistributions[0].ID)
- assert.Equal(t, rule.Id, evaluationDistributions[0].RuleID)
- assert.Equal(t, variant1.Id, evaluationDistributions[0].VariantID)
- assert.Equal(t, variant1.Key, evaluationDistributions[0].VariantKey)
- assert.InDelta(t, 80.00, evaluationDistributions[0].Rollout, 0)
-
- assert.NotEmpty(t, evaluationDistributions[1].ID)
- assert.Equal(t, rule.Id, evaluationDistributions[1].RuleID)
- assert.Equal(t, variant2.Id, evaluationDistributions[1].VariantID)
- assert.Equal(t, variant2.Key, evaluationDistributions[1].VariantKey)
- assert.InDelta(t, 20.00, evaluationDistributions[1].Rollout, 0)
-
- // update dist1 with same values
- _, err = s.store.UpdateDistribution(context.TODO(), &flipt.UpdateDistributionRequest{
- Id: dist1.Id,
- FlagKey: flag.Key,
- RuleId: rule.Id,
- VariantId: variant1.Id,
- Rollout: 80.00,
- })
-
- require.NoError(t, err)
-
- // required for MySQL since it only s.stores timestamps to the second and not millisecond granularity
- time.Sleep(1 * time.Second)
-
- // update dist2 with same values
- _, err = s.store.UpdateDistribution(context.TODO(), &flipt.UpdateDistributionRequest{
- Id: dist2.Id,
- FlagKey: flag.Key,
- RuleId: rule.Id,
- VariantId: variant2.Id,
- Rollout: 20.00,
- })
-
- require.NoError(t, err)
-
- evaluationDistributions, err = s.store.GetEvaluationDistributions(context.TODO(), storage.NewResource(flag.NamespaceKey, flag.Key), storage.NewID(rule.Id))
- require.NoError(t, err)
-
- assert.Len(t, evaluationDistributions, 2)
-
- assert.NotEmpty(t, evaluationDistributions[0].ID)
- assert.Equal(t, rule.Id, evaluationDistributions[0].RuleID)
- assert.Equal(t, variant1.Id, evaluationDistributions[0].VariantID)
- assert.Equal(t, variant1.Key, evaluationDistributions[0].VariantKey)
- assert.InDelta(t, 80.00, evaluationDistributions[0].Rollout, 0)
-
- assert.NotEmpty(t, evaluationDistributions[1].ID)
- assert.Equal(t, rule.Id, evaluationDistributions[1].RuleID)
- assert.Equal(t, variant2.Id, evaluationDistributions[1].VariantID)
- assert.Equal(t, variant2.Key, evaluationDistributions[1].VariantKey)
- assert.InDelta(t, 20.00, evaluationDistributions[1].Rollout, 0)
-}
-
-func (s *DBTestSuite) TestGetEvaluationRollouts() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- Type: flipt.FlagType_BOOLEAN_FLAG_TYPE,
- })
-
- require.NoError(t, err)
-
- segment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- MatchType: flipt.MatchType_ANY_MATCH_TYPE,
- })
-
- require.NoError(t, err)
-
- _, err = s.store.CreateRollout(context.TODO(), &flipt.CreateRolloutRequest{
- FlagKey: flag.Key,
- Rank: 1,
- Rule: &flipt.CreateRolloutRequest_Threshold{
- Threshold: &flipt.RolloutThreshold{
- Percentage: 50.0,
- Value: false,
- },
- },
- })
-
- require.NoError(t, err)
-
- _, err = s.store.CreateRollout(context.TODO(), &flipt.CreateRolloutRequest{
- FlagKey: flag.Key,
- Rank: 2,
- Rule: &flipt.CreateRolloutRequest_Segment{
- Segment: &flipt.RolloutSegment{
- SegmentKeys: []string{segment.Key},
- Value: true,
- },
- },
- })
-
- require.NoError(t, err)
-
- evaluationRollouts, err := s.store.GetEvaluationRollouts(context.TODO(), storage.NewResource(storage.DefaultNamespace, flag.Key))
- require.NoError(t, err)
-
- assert.Len(t, evaluationRollouts, 2)
-
- assert.Equal(t, "default", evaluationRollouts[0].NamespaceKey)
- assert.Equal(t, int32(1), evaluationRollouts[0].Rank)
- assert.NotNil(t, evaluationRollouts[0].Threshold)
- assert.InDelta(t, 50.0, evaluationRollouts[0].Threshold.Percentage, 0)
- assert.False(t, evaluationRollouts[0].Threshold.Value, "percentage value is false")
-
- assert.Equal(t, "default", evaluationRollouts[1].NamespaceKey)
- assert.Equal(t, int32(2), evaluationRollouts[1].Rank)
- assert.NotNil(t, evaluationRollouts[1].Segment)
-
- assert.Contains(t, evaluationRollouts[1].Segment.Segments, segment.Key)
- assert.Equal(t, segment.MatchType, evaluationRollouts[1].Segment.Segments[segment.Key].MatchType)
-
- assert.True(t, evaluationRollouts[1].Segment.Value, "segment value is true")
-}
-
-func (s *DBTestSuite) TestGetEvaluationRollouts_NoNamespace() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- Type: flipt.FlagType_BOOLEAN_FLAG_TYPE,
- })
-
- require.NoError(t, err)
-
- firstSegment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- MatchType: flipt.MatchType_ANY_MATCH_TYPE,
- })
-
- require.NoError(t, err)
-
- secondSegment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- Key: "another_segment",
- Name: "foo",
- Description: "bar",
- MatchType: flipt.MatchType_ANY_MATCH_TYPE,
- })
-
- require.NoError(t, err)
-
- _, err = s.store.CreateConstraint(context.TODO(), &flipt.CreateConstraintRequest{
- SegmentKey: firstSegment.Key,
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "foo",
- Operator: "EQ",
- Value: "bar",
- })
-
- require.NoError(t, err)
-
- _, err = s.store.CreateRollout(context.TODO(), &flipt.CreateRolloutRequest{
- FlagKey: flag.Key,
- Rank: 1,
- Rule: &flipt.CreateRolloutRequest_Threshold{
- Threshold: &flipt.RolloutThreshold{
- Percentage: 50.0,
- Value: false,
- },
- },
- })
-
- require.NoError(t, err)
-
- _, err = s.store.CreateRollout(context.TODO(), &flipt.CreateRolloutRequest{
- FlagKey: flag.Key,
- Rank: 2,
- Rule: &flipt.CreateRolloutRequest_Segment{
- Segment: &flipt.RolloutSegment{
- SegmentKeys: []string{firstSegment.Key, secondSegment.Key},
- SegmentOperator: flipt.SegmentOperator_AND_SEGMENT_OPERATOR,
- Value: true,
- },
- },
- })
-
- require.NoError(t, err)
-
- evaluationRollouts, err := s.store.GetEvaluationRollouts(context.TODO(), storage.NewResource("", flag.Key))
- require.NoError(t, err)
-
- assert.Len(t, evaluationRollouts, 2)
-
- assert.Equal(t, "default", evaluationRollouts[0].NamespaceKey)
- assert.Equal(t, int32(1), evaluationRollouts[0].Rank)
- assert.NotNil(t, evaluationRollouts[0].Threshold)
- assert.InDelta(t, 50.0, evaluationRollouts[0].Threshold.Percentage, 0)
- assert.False(t, evaluationRollouts[0].Threshold.Value, "percentage value is false")
-
- assert.Equal(t, "default", evaluationRollouts[1].NamespaceKey)
- assert.Equal(t, int32(2), evaluationRollouts[1].Rank)
- assert.NotNil(t, evaluationRollouts[1].Segment)
-
- assert.Equal(t, firstSegment.Key, evaluationRollouts[1].Segment.Segments[firstSegment.Key].SegmentKey)
- assert.Equal(t, flipt.SegmentOperator_AND_SEGMENT_OPERATOR, evaluationRollouts[1].Segment.SegmentOperator)
- assert.Len(t, evaluationRollouts[1].Segment.Segments[firstSegment.Key].Constraints, 1)
-
- assert.Equal(t, secondSegment.Key, evaluationRollouts[1].Segment.Segments[secondSegment.Key].SegmentKey)
- assert.Empty(t, evaluationRollouts[1].Segment.Segments[secondSegment.Key].Constraints)
-}
-
-func (s *DBTestSuite) TestGetEvaluationRollouts_NonDefaultNamespace() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- Type: flipt.FlagType_BOOLEAN_FLAG_TYPE,
- })
-
- require.NoError(t, err)
-
- segment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- MatchType: flipt.MatchType_ANY_MATCH_TYPE,
- })
-
- require.NoError(t, err)
-
- _, err = s.store.CreateConstraint(context.TODO(), &flipt.CreateConstraintRequest{
- NamespaceKey: s.namespace,
- SegmentKey: segment.Key,
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "foo",
- Operator: "EQ",
- Value: "bar",
- })
-
- require.NoError(t, err)
-
- _, err = s.store.CreateRollout(context.TODO(), &flipt.CreateRolloutRequest{
- NamespaceKey: s.namespace,
- FlagKey: flag.Key,
- Rank: 1,
- Rule: &flipt.CreateRolloutRequest_Threshold{
- Threshold: &flipt.RolloutThreshold{
- Percentage: 50.0,
- Value: false,
- },
- },
- })
-
- require.NoError(t, err)
-
- _, err = s.store.CreateRollout(context.TODO(), &flipt.CreateRolloutRequest{
- NamespaceKey: s.namespace,
- FlagKey: flag.Key,
- Rank: 2,
- Rule: &flipt.CreateRolloutRequest_Segment{
- Segment: &flipt.RolloutSegment{
- SegmentKey: segment.Key,
- Value: true,
- },
- },
- })
-
- require.NoError(t, err)
-
- evaluationRollouts, err := s.store.GetEvaluationRollouts(context.TODO(), storage.NewResource(s.namespace, flag.Key))
- require.NoError(t, err)
-
- assert.Len(t, evaluationRollouts, 2)
-
- assert.Equal(t, s.namespace, evaluationRollouts[0].NamespaceKey)
- assert.Equal(t, int32(1), evaluationRollouts[0].Rank)
- assert.NotNil(t, evaluationRollouts[0].Threshold)
- assert.InDelta(t, 50.0, evaluationRollouts[0].Threshold.Percentage, 0)
- assert.False(t, evaluationRollouts[0].Threshold.Value, "percentage value is false")
-
- assert.Equal(t, s.namespace, evaluationRollouts[1].NamespaceKey)
- assert.Equal(t, int32(2), evaluationRollouts[1].Rank)
- assert.NotNil(t, evaluationRollouts[1].Segment)
-
- assert.Contains(t, evaluationRollouts[1].Segment.Segments, segment.Key)
- assert.Equal(t, segment.MatchType, evaluationRollouts[1].Segment.Segments[segment.Key].MatchType)
-
- assert.True(t, evaluationRollouts[1].Segment.Value, "segment value is true")
-}
-
-func Benchmark_EvaluationV1AndV2(b *testing.B) {
- s := new(DBTestSuite)
-
- t := &testing.T{}
- s.SetT(t)
- s.SetupSuite()
-
- server := server.New(zap.NewNop(), s.store)
- importer := ext.NewImporter(server)
- reader, err := os.Open("./testdata/benchmark_test.yml")
- defer func() {
- _ = reader.Close()
- }()
- skipExistingFalse := false
-
- require.NoError(t, err)
-
- err = importer.Import(context.TODO(), ext.EncodingYML, reader, skipExistingFalse)
- require.NoError(t, err)
-
- flagKeys := make([]string, 0, 10)
-
- for i := 0; i < 10; i++ {
- var flagKey string
-
- num := rand.Intn(50) + 1
-
- if num < 10 {
- flagKey = fmt.Sprintf("flag_00%d", num)
- } else {
- flagKey = fmt.Sprintf("flag_0%d", num)
- }
-
- flagKeys = append(flagKeys, flagKey)
- }
-
- eserver := evaluation.New(zap.NewNop(), s.store)
-
- b.ResetTimer()
-
- for _, flagKey := range flagKeys {
- entityId := uuid.NewString()
- ereq := &flipt.EvaluationRequest{
- FlagKey: flagKey,
- EntityId: entityId,
- }
-
- ev2req := &rpcevaluation.EvaluationRequest{
- FlagKey: flagKey,
- EntityId: entityId,
- }
-
- b.Run(fmt.Sprintf("evaluation-v1-%s", flagKey), func(b *testing.B) {
- for i := 0; i < b.N; i++ {
- evaluation, err := server.Evaluate(context.TODO(), ereq)
-
- require.NoError(t, err)
- assert.NotEmpty(t, evaluation)
- }
- })
-
- b.Run(fmt.Sprintf("variant-evaluation-%s", flagKey), func(b *testing.B) {
- for i := 0; i < b.N; i++ {
- variant, err := eserver.Variant(context.TODO(), ev2req)
-
- require.NoError(t, err)
- assert.NotEmpty(t, variant)
- }
- })
- }
-
- for _, flagKey := range []string{"flag_boolean", "another_boolean_flag"} {
- breq := &rpcevaluation.EvaluationRequest{
- FlagKey: flagKey,
- EntityId: uuid.NewString(),
- }
-
- b.Run(fmt.Sprintf("boolean-evaluation-%s", flagKey), func(b *testing.B) {
- for i := 0; i < b.N; i++ {
- boolean, err := eserver.Boolean(context.TODO(), breq)
-
- require.NoError(t, err)
- assert.NotEmpty(t, boolean)
- }
- })
- }
-
- s.TearDownSuite()
-}
diff --git a/internal/storage/sql/flag_test.go b/internal/storage/sql/flag_test.go
deleted file mode 100644
index 85a3db544d..0000000000
--- a/internal/storage/sql/flag_test.go
+++ /dev/null
@@ -1,1823 +0,0 @@
-package sql_test
-
-import (
- "context"
- "encoding/base64"
- "encoding/json"
- "fmt"
- "testing"
- "time"
-
- "go.flipt.io/flipt/internal/storage"
- fliptsql "go.flipt.io/flipt/internal/storage/sql"
- "go.flipt.io/flipt/internal/storage/sql/common"
- flipt "go.flipt.io/flipt/rpc/flipt"
- "google.golang.org/protobuf/types/known/structpb"
-
- "github.com/google/uuid"
- "github.com/stretchr/testify/assert"
- "github.com/stretchr/testify/require"
-)
-
-func (s *DBTestSuite) TestGetFlag() {
- t := s.T()
-
- metadataMap := map[string]any{
- "key": "value",
- }
-
- metadata, err := structpb.NewStruct(metadataMap)
- require.NoError(t, err, "Failed to create metadata struct")
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- Metadata: metadata,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- got, err := s.store.GetFlag(context.TODO(), storage.NewResource(storage.DefaultNamespace, flag.Key))
-
- require.NoError(t, err)
- assert.NotNil(t, got)
-
- assert.Equal(t, storage.DefaultNamespace, got.NamespaceKey)
- assert.Equal(t, flag.Key, got.Key)
- assert.Equal(t, flag.Name, got.Name)
- assert.Equal(t, flag.Description, got.Description)
- assert.Equal(t, flag.Enabled, got.Enabled)
- assert.Equal(t, flag.Metadata.String(), got.Metadata.String())
-
- assert.NotZero(t, flag.CreatedAt)
- assert.NotZero(t, flag.UpdatedAt)
-}
-
-func (s *DBTestSuite) TestGetFlagNamespace() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- got, err := s.store.GetFlag(context.TODO(), storage.NewResource(s.namespace, flag.Key))
-
- require.NoError(t, err)
- assert.NotNil(t, got)
-
- assert.Equal(t, s.namespace, got.NamespaceKey)
- assert.Equal(t, flag.Key, got.Key)
- assert.Equal(t, flag.Name, got.Name)
- assert.Equal(t, flag.Description, got.Description)
- assert.Equal(t, flag.Enabled, got.Enabled)
- assert.NotZero(t, flag.CreatedAt)
- assert.NotZero(t, flag.UpdatedAt)
-}
-
-func (s *DBTestSuite) TestGetFlag_NotFound() {
- t := s.T()
-
- _, err := s.store.GetFlag(context.TODO(), storage.NewResource(storage.DefaultNamespace, "foo"))
- assert.EqualError(t, err, "flag \"default/foo\" not found")
-}
-
-func (s *DBTestSuite) TestGetFlagNamespace_NotFound() {
- t := s.T()
-
- _, err := s.store.GetFlag(context.TODO(), storage.NewResource(s.namespace, "foo"))
- assert.EqualError(t, err, fmt.Sprintf("flag \"%s/foo\" not found", s.namespace))
-}
-
-func (s *DBTestSuite) TestListFlags() {
- t := s.T()
-
- reqs := []*flipt.CreateFlagRequest{
- {
- Key: uuid.NewString(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- },
- {
- Key: uuid.NewString(),
- Name: "foo",
- Description: "bar",
- },
- }
-
- for _, req := range reqs {
- _, err := s.store.CreateFlag(context.TODO(), req)
- require.NoError(t, err)
- }
-
- _, err := s.store.ListFlags(context.TODO(), storage.ListWithOptions(
- storage.NewNamespace(storage.DefaultNamespace),
- storage.ListWithQueryParamOptions[storage.NamespaceRequest](storage.WithPageToken("Hello World"))),
- )
- require.EqualError(t, err, "pageToken is not valid: \"Hello World\"")
-
- res, err := s.store.ListFlags(context.TODO(), storage.ListWithOptions(storage.NewNamespace(storage.DefaultNamespace)))
- require.NoError(t, err)
-
- got := res.Results
- assert.NotEmpty(t, got)
-
- for _, flag := range got {
- assert.Equal(t, storage.DefaultNamespace, flag.NamespaceKey)
- assert.NotZero(t, flag.CreatedAt)
- assert.NotZero(t, flag.UpdatedAt)
- }
-}
-
-func (s *DBTestSuite) TestListFlagsNamespace() {
- t := s.T()
-
- reqs := []*flipt.CreateFlagRequest{
- {
- NamespaceKey: s.namespace,
- Key: uuid.NewString(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- },
- {
- NamespaceKey: s.namespace,
- Key: uuid.NewString(),
- Name: "foo",
- Description: "bar",
- },
- }
-
- for _, req := range reqs {
- _, err := s.store.CreateFlag(context.TODO(), req)
- require.NoError(t, err)
- }
-
- res, err := s.store.ListFlags(context.TODO(), storage.ListWithOptions(storage.NewNamespace(s.namespace)))
- require.NoError(t, err)
-
- got := res.Results
- assert.NotEmpty(t, got)
-
- for _, flag := range got {
- assert.Equal(t, s.namespace, flag.NamespaceKey)
- assert.NotZero(t, flag.CreatedAt)
- assert.NotZero(t, flag.UpdatedAt)
- }
-}
-
-func (s *DBTestSuite) TestListFlagsPagination_LimitOffset() {
- t := s.T()
-
- reqs := []*flipt.CreateFlagRequest{
- {
- Key: uuid.NewString(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- },
- {
- Key: uuid.NewString(),
- Name: "foo",
- Description: "bar",
- },
- {
- Key: uuid.NewString(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- },
- }
-
- for _, req := range reqs {
- if s.db.Driver == fliptsql.MySQL {
- // required for MySQL since it only s.stores timestamps to the second and not millisecond granularity
- time.Sleep(time.Second)
- }
- _, err := s.store.CreateFlag(context.TODO(), req)
- require.NoError(t, err)
- }
-
- oldest, middle, newest := reqs[0], reqs[1], reqs[2]
-
- // TODO: the ordering (DESC) is required because the default ordering is ASC and we are not clearing the DB between tests
- // get middle flag
- req := storage.ListWithOptions(
- storage.NewNamespace(storage.DefaultNamespace),
- storage.ListWithQueryParamOptions[storage.NamespaceRequest](
- storage.WithOrder(storage.OrderDesc), storage.WithLimit(1), storage.WithOffset(1),
- ),
- )
- res, err := s.store.ListFlags(context.TODO(), req)
- require.NoError(t, err)
-
- got := res.Results
- assert.Len(t, got, 1)
-
- assert.Equal(t, middle.Key, got[0].Key)
-
- // get first (newest) flag
- req = storage.ListWithOptions(
- storage.NewNamespace(storage.DefaultNamespace),
- storage.ListWithQueryParamOptions[storage.NamespaceRequest](
- storage.WithOrder(storage.OrderDesc), storage.WithLimit(1),
- ),
- )
- res, err = s.store.ListFlags(context.TODO(), req)
- require.NoError(t, err)
-
- got = res.Results
- assert.Len(t, got, 1)
-
- assert.Equal(t, newest.Key, got[0].Key)
-
- // get last (oldest) flag
- req = storage.ListWithOptions(
- storage.NewNamespace(storage.DefaultNamespace),
- storage.ListWithQueryParamOptions[storage.NamespaceRequest](
- storage.WithOrder(storage.OrderDesc), storage.WithLimit(1), storage.WithOffset(2),
- ),
- )
- res, err = s.store.ListFlags(context.TODO(), req)
- require.NoError(t, err)
-
- got = res.Results
- assert.Len(t, got, 1)
-
- assert.Equal(t, oldest.Key, got[0].Key)
-
- // get all flags
- req = storage.ListWithOptions(
- storage.NewNamespace(storage.DefaultNamespace),
- storage.ListWithQueryParamOptions[storage.NamespaceRequest](
- storage.WithOrder(storage.OrderDesc),
- ),
- )
- res, err = s.store.ListFlags(context.TODO(), req)
- require.NoError(t, err)
-
- got = res.Results
-
- assert.Equal(t, newest.Key, got[0].Key)
- assert.Equal(t, middle.Key, got[1].Key)
- assert.Equal(t, oldest.Key, got[2].Key)
-}
-
-func (s *DBTestSuite) TestListFlagsPagination_LimitWithNextPage() {
- t := s.T()
-
- reqs := []*flipt.CreateFlagRequest{
- {
- Key: uuid.NewString(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- },
- {
- Key: uuid.NewString(),
- Name: "foo",
- Description: "bar",
- },
- {
- Key: uuid.NewString(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- },
- }
-
- for _, req := range reqs {
- if s.db.Driver == fliptsql.MySQL {
- // required for MySQL since it only s.stores timestamps to the second and not millisecond granularity
- time.Sleep(time.Second)
- }
- _, err := s.store.CreateFlag(context.TODO(), req)
- require.NoError(t, err)
- }
-
- oldest, middle, newest := reqs[0], reqs[1], reqs[2]
-
- // TODO: the ordering (DESC) is required because the default ordering is ASC and we are not clearing the DB between tests
- // get newest flag
- req := storage.ListWithOptions(
- storage.NewNamespace(storage.DefaultNamespace),
- storage.ListWithQueryParamOptions[storage.NamespaceRequest](storage.WithOrder(storage.OrderDesc), storage.WithLimit(1)),
- )
- res, err := s.store.ListFlags(context.TODO(), req)
- require.NoError(t, err)
-
- got := res.Results
- assert.Len(t, got, 1)
- assert.Equal(t, newest.Key, got[0].Key)
- assert.NotEmpty(t, res.NextPageToken)
-
- pageToken := &common.PageToken{}
- pTokenB, err := base64.StdEncoding.DecodeString(res.NextPageToken)
- require.NoError(t, err)
-
- err = json.Unmarshal(pTokenB, pageToken)
- require.NoError(t, err)
- // next page should be the middle flag
- assert.Equal(t, middle.Key, pageToken.Key)
- assert.NotZero(t, pageToken.Offset)
-
- req.QueryParams.PageToken = res.NextPageToken
-
- // get middle flag
- res, err = s.store.ListFlags(context.TODO(), req)
- require.NoError(t, err)
-
- got = res.Results
- assert.Len(t, got, 1)
- assert.Equal(t, middle.Key, got[0].Key)
-
- pTokenB, err = base64.StdEncoding.DecodeString(res.NextPageToken)
- require.NoError(t, err)
-
- err = json.Unmarshal(pTokenB, pageToken)
-
- require.NoError(t, err)
- // next page should be the oldest flag
- assert.Equal(t, oldest.Key, pageToken.Key)
- assert.NotZero(t, pageToken.Offset)
-
- req.QueryParams.Limit = 1
- req.QueryParams.Order = storage.OrderDesc
- req.QueryParams.PageToken = res.NextPageToken
-
- // get oldest flag
- res, err = s.store.ListFlags(context.TODO(), req)
- require.NoError(t, err)
-
- got = res.Results
- assert.Len(t, got, 1)
- assert.Equal(t, oldest.Key, got[0].Key)
-
- req = storage.ListWithOptions(
- storage.NewNamespace(storage.DefaultNamespace),
- storage.ListWithQueryParamOptions[storage.NamespaceRequest](
- storage.WithOrder(storage.OrderDesc), storage.WithLimit(3),
- ),
- )
- // get all flags
- res, err = s.store.ListFlags(context.TODO(), req)
- require.NoError(t, err)
-
- got = res.Results
- assert.Len(t, got, 3)
- assert.Equal(t, newest.Key, got[0].Key)
- assert.Equal(t, middle.Key, got[1].Key)
- assert.Equal(t, oldest.Key, got[2].Key)
-}
-
-func (s *DBTestSuite) TestListFlagsPagination_FullWalk() {
- t := s.T()
-
- namespace := uuid.NewString()
-
- ctx := context.Background()
- _, err := s.store.CreateNamespace(ctx, &flipt.CreateNamespaceRequest{
- Key: namespace,
- })
- require.NoError(t, err)
-
- var (
- totalFlags = 9
- pageSize = uint64(3)
- )
-
- for i := 0; i < totalFlags; i++ {
- req := flipt.CreateFlagRequest{
- NamespaceKey: namespace,
- Key: fmt.Sprintf("flag_%03d", i),
- Name: "foo",
- Description: "bar",
- }
-
- _, err := s.store.CreateFlag(ctx, &req)
- require.NoError(t, err)
-
- for i := 0; i < 2; i++ {
- if i > 0 && s.db.Driver == fliptsql.MySQL {
- // required for MySQL since it only s.stores timestamps to the second and not millisecond granularity
- time.Sleep(time.Second)
- }
-
- _, err := s.store.CreateVariant(ctx, &flipt.CreateVariantRequest{
- NamespaceKey: namespace,
- FlagKey: req.Key,
- Key: fmt.Sprintf("variant_%d", i),
- })
- require.NoError(t, err)
- }
- }
-
- req := storage.ListWithOptions(
- storage.NewNamespace(namespace),
- storage.ListWithQueryParamOptions[storage.NamespaceRequest](
- storage.WithLimit(pageSize),
- ),
- )
- resp, err := s.store.ListFlags(ctx, req)
- require.NoError(t, err)
-
- found := resp.Results
- for token := resp.NextPageToken; token != ""; token = resp.NextPageToken {
- req.QueryParams.PageToken = token
- resp, err = s.store.ListFlags(ctx, req)
- require.NoError(t, err)
-
- found = append(found, resp.Results...)
- }
-
- require.Len(t, found, totalFlags)
-
- for i := 0; i < totalFlags; i++ {
- assert.Equal(t, namespace, found[i].NamespaceKey)
-
- expectedFlag := fmt.Sprintf("flag_%03d", i)
- assert.Equal(t, expectedFlag, found[i].Key)
- assert.Equal(t, "foo", found[i].Name)
- assert.Equal(t, "bar", found[i].Description)
-
- require.Len(t, found[i].Variants, 2)
- assert.Equal(t, namespace, found[i].Variants[0].NamespaceKey)
- assert.Equal(t, expectedFlag, found[i].Variants[0].FlagKey)
- assert.Equal(t, "variant_0", found[i].Variants[0].Key)
-
- assert.Equal(t, namespace, found[i].Variants[1].NamespaceKey)
- assert.Equal(t, expectedFlag, found[i].Variants[1].FlagKey)
- assert.Equal(t, "variant_1", found[i].Variants[1].Key)
- }
-}
-
-func (s *DBTestSuite) TestCreateFlag() {
- t := s.T()
-
- metadataMap := map[string]any{
- "key": "value",
- }
-
- metadata, _ := structpb.NewStruct(metadataMap)
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- Metadata: metadata,
- })
-
- require.NoError(t, err)
-
- assert.Equal(t, storage.DefaultNamespace, flag.NamespaceKey)
- assert.Equal(t, t.Name(), flag.Key)
- assert.Equal(t, "foo", flag.Name)
- assert.Equal(t, "bar", flag.Description)
- assert.True(t, flag.Enabled)
- assert.Equal(t, metadata.String(), flag.Metadata.String())
- assert.NotZero(t, flag.CreatedAt)
- assert.Equal(t, flag.CreatedAt.Seconds, flag.UpdatedAt.Seconds)
-}
-
-func (s *DBTestSuite) TestCreateFlagNamespace() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
-
- assert.Equal(t, s.namespace, flag.NamespaceKey)
- assert.Equal(t, t.Name(), flag.Key)
- assert.Equal(t, "foo", flag.Name)
- assert.Equal(t, "bar", flag.Description)
- assert.True(t, flag.Enabled)
- assert.NotZero(t, flag.CreatedAt)
- assert.Equal(t, flag.CreatedAt.Seconds, flag.UpdatedAt.Seconds)
-}
-
-func (s *DBTestSuite) TestCreateFlag_DuplicateKey() {
- t := s.T()
-
- _, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
-
- _, err = s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- assert.EqualError(t, err, "flag \"default/TestDBTestSuite/TestCreateFlag_DuplicateKey\" is not unique")
-}
-
-func (s *DBTestSuite) TestCreateFlagNamespace_DuplicateKey() {
- t := s.T()
-
- _, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
-
- _, err = s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- assert.EqualError(t, err, fmt.Sprintf("flag \"%s/%s\" is not unique", s.namespace, t.Name()))
-}
-
-func (s *DBTestSuite) TestUpdateFlag() {
- t := s.T()
-
- metadataMap := map[string]any{
- "key": "value",
- }
-
- metadata, _ := structpb.NewStruct(metadataMap)
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- Metadata: metadata,
- })
-
- require.NoError(t, err)
-
- assert.Equal(t, storage.DefaultNamespace, flag.NamespaceKey)
- assert.Equal(t, t.Name(), flag.Key)
- assert.Equal(t, "foo", flag.Name)
- assert.Equal(t, "bar", flag.Description)
- assert.True(t, flag.Enabled)
- assert.Equal(t, metadata.String(), flag.Metadata.String())
- assert.NotZero(t, flag.CreatedAt)
- assert.Equal(t, flag.CreatedAt.Seconds, flag.UpdatedAt.Seconds)
-
- updatedMetadataMap := map[string]any{
- "key": "value",
- "foo": "bar",
- }
-
- updatedMetadata, _ := structpb.NewStruct(updatedMetadataMap)
- updated, err := s.store.UpdateFlag(context.TODO(), &flipt.UpdateFlagRequest{
- Key: flag.Key,
- Name: flag.Name,
- Description: "foobar",
- Enabled: true,
- Metadata: updatedMetadata,
- })
-
- require.NoError(t, err)
-
- assert.Equal(t, storage.DefaultNamespace, updated.NamespaceKey)
- assert.Equal(t, flag.Key, updated.Key)
- assert.Equal(t, flag.Name, updated.Name)
- assert.Equal(t, "foobar", updated.Description)
- assert.True(t, flag.Enabled)
- assert.Equal(t, updatedMetadata.String(), updated.Metadata.String())
- assert.NotZero(t, updated.CreatedAt)
- assert.NotZero(t, updated.UpdatedAt)
-}
-
-func (s *DBTestSuite) TestUpdateFlagNamespace() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
-
- assert.Equal(t, s.namespace, flag.NamespaceKey)
- assert.Equal(t, t.Name(), flag.Key)
- assert.Equal(t, "foo", flag.Name)
- assert.Equal(t, "bar", flag.Description)
- assert.True(t, flag.Enabled)
- assert.NotZero(t, flag.CreatedAt)
- assert.Equal(t, flag.CreatedAt.Seconds, flag.UpdatedAt.Seconds)
-
- updated, err := s.store.UpdateFlag(context.TODO(), &flipt.UpdateFlagRequest{
- NamespaceKey: s.namespace,
- Key: flag.Key,
- Name: flag.Name,
- Description: "foobar",
- Enabled: true,
- })
-
- require.NoError(t, err)
-
- assert.Equal(t, s.namespace, updated.NamespaceKey)
- assert.Equal(t, flag.Key, updated.Key)
- assert.Equal(t, flag.Name, updated.Name)
- assert.Equal(t, "foobar", updated.Description)
- assert.True(t, flag.Enabled)
- assert.NotZero(t, updated.CreatedAt)
- assert.NotZero(t, updated.UpdatedAt)
-}
-
-func (s *DBTestSuite) TestUpdateFlag_NotFound() {
- t := s.T()
-
- _, err := s.store.UpdateFlag(context.TODO(), &flipt.UpdateFlagRequest{
- Key: "foo",
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- assert.EqualError(t, err, "flag \"default/foo\" not found")
-}
-
-func (s *DBTestSuite) TestUpdateFlagNamespace_NotFound() {
- t := s.T()
-
- _, err := s.store.UpdateFlag(context.TODO(), &flipt.UpdateFlagRequest{
- NamespaceKey: s.namespace,
- Key: "foo",
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- assert.EqualError(t, err, fmt.Sprintf("flag \"%s/foo\" not found", s.namespace))
-}
-
-func (s *DBTestSuite) TestUpdateFlag_DefaultVariant() {
- t := s.T()
-
- t.Run("update flag with default variant", func(t *testing.T) {
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
-
- variant, err := s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- FlagKey: flag.Key,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Attachment: `{"key":"value"}`,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, variant)
-
- _, err = s.store.UpdateFlag(context.TODO(), &flipt.UpdateFlagRequest{
- Key: flag.Key,
- Name: flag.Name,
- Description: "foobar",
- Enabled: true,
- DefaultVariantId: variant.Id,
- })
-
- require.NoError(t, err)
-
- // get the flag again
- flag, err = s.store.GetFlag(context.TODO(), storage.NewResource(storage.DefaultNamespace, flag.Key))
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
- assert.Equal(t, variant.Id, flag.DefaultVariant.Id)
- assert.Equal(t, variant.Key, flag.DefaultVariant.Key)
- assert.Equal(t, variant.Name, flag.DefaultVariant.Name)
- assert.Equal(t, variant.Description, flag.DefaultVariant.Description)
- assert.Equal(t, variant.Attachment, flag.DefaultVariant.Attachment)
- })
-
- t.Run("update flag with default variant not found", func(t *testing.T) {
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
-
- _, err = s.store.UpdateFlag(context.TODO(), &flipt.UpdateFlagRequest{
- Key: flag.Key,
- Name: flag.Name,
- Description: "foobar",
- Enabled: true,
- DefaultVariantId: "non-existent",
- })
-
- assert.EqualError(t, err, "variant \"non-existent\" not found for flag \"default/TestDBTestSuite/TestUpdateFlag_DefaultVariant/update_flag_with_default_variant_not_found\"")
- })
-
- t.Run("update flag with variant from different flag", func(t *testing.T) {
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
-
- flag2, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: fmt.Sprintf("%s_two", t.Name()),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
-
- variant, err := s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- FlagKey: flag2.Key,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Attachment: `{"key":"value"}`,
- })
-
- require.NoError(t, err)
-
- _, err = s.store.UpdateFlag(context.TODO(), &flipt.UpdateFlagRequest{
- Key: flag.Key,
- Name: flag.Name,
- Description: "foobar",
- Enabled: true,
- DefaultVariantId: variant.Id,
- })
-
- assert.EqualError(t, err, fmt.Sprintf("variant \"%s\" not found for flag \"%s/%s\"", variant.Id, "default", flag.Key))
- })
-
- t.Run("update flag with default variant in different namespace", func(t *testing.T) {
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
-
- variant, err := s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- NamespaceKey: s.namespace,
- FlagKey: flag.Key,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Attachment: `{"key":"value"}`,
- })
-
- require.NoError(t, err)
-
- // flag in default namespace
- flag2, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
-
- // try to update flag in non-default namespace with default variant from default namespace
- _, err = s.store.UpdateFlag(context.TODO(), &flipt.UpdateFlagRequest{
- Key: flag2.Key,
- Name: flag2.Name,
- Description: flag2.Description,
- Enabled: true,
- DefaultVariantId: variant.Id,
- })
-
- assert.EqualError(t, err, fmt.Sprintf("variant \"%s\" not found for flag \"%s/%s\"", variant.Id, "default", flag2.Key))
- })
-}
-
-func (s *DBTestSuite) TestDeleteFlag() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- err = s.store.DeleteFlag(context.TODO(), &flipt.DeleteFlagRequest{Key: flag.Key})
- require.NoError(t, err)
-}
-
-func (s *DBTestSuite) TestDeleteFlagNamespace() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- err = s.store.DeleteFlag(context.TODO(), &flipt.DeleteFlagRequest{
- NamespaceKey: s.namespace,
- Key: flag.Key,
- })
-
- require.NoError(t, err)
-}
-
-func (s *DBTestSuite) TestDeleteFlag_NotFound() {
- t := s.T()
-
- err := s.store.DeleteFlag(context.TODO(), &flipt.DeleteFlagRequest{Key: "foo"})
- require.NoError(t, err)
-}
-
-func (s *DBTestSuite) TestDeleteFlagNamespace_NotFound() {
- t := s.T()
-
- err := s.store.DeleteFlag(context.TODO(), &flipt.DeleteFlagRequest{
- NamespaceKey: s.namespace,
- Key: "foo",
- })
-
- require.NoError(t, err)
-}
-
-func (s *DBTestSuite) TestCreateVariant() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- attachment := `{"key":"value"}`
- variant, err := s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- FlagKey: flag.Key,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Attachment: attachment,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, variant)
-
- assert.NotZero(t, variant.Id)
- assert.Equal(t, storage.DefaultNamespace, variant.NamespaceKey)
- assert.Equal(t, flag.Key, variant.FlagKey)
- assert.Equal(t, t.Name(), variant.Key)
- assert.Equal(t, "foo", variant.Name)
- assert.Equal(t, "bar", variant.Description)
- assert.Equal(t, attachment, variant.Attachment)
- assert.NotZero(t, variant.CreatedAt)
- assert.Equal(t, variant.CreatedAt.Seconds, variant.UpdatedAt.Seconds)
-
- // get the flag again
- flag, err = s.store.GetFlag(context.TODO(), storage.NewResource(storage.DefaultNamespace, flag.Key))
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- assert.Len(t, flag.Variants, 1)
-}
-
-func (s *DBTestSuite) TestCreateVariantNamespace() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- attachment := `{"key":"value"}`
- variant, err := s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- NamespaceKey: s.namespace,
- FlagKey: flag.Key,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Attachment: attachment,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, variant)
-
- assert.NotZero(t, variant.Id)
- assert.Equal(t, s.namespace, variant.NamespaceKey)
- assert.Equal(t, flag.Key, variant.FlagKey)
- assert.Equal(t, t.Name(), variant.Key)
- assert.Equal(t, "foo", variant.Name)
- assert.Equal(t, "bar", variant.Description)
- assert.Equal(t, attachment, variant.Attachment)
- assert.NotZero(t, variant.CreatedAt)
- assert.Equal(t, variant.CreatedAt.Seconds, variant.UpdatedAt.Seconds)
-
- // get the flag again
- flag, err = s.store.GetFlag(context.TODO(), storage.NewResource(s.namespace, flag.Key))
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- assert.Len(t, flag.Variants, 1)
-}
-
-func (s *DBTestSuite) TestCreateVariant_FlagNotFound() {
- t := s.T()
-
- _, err := s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- FlagKey: "foo",
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- assert.EqualError(t, err, "flag \"default/foo\" not found")
-}
-
-func (s *DBTestSuite) TestCreateVariantNamespace_FlagNotFound() {
- t := s.T()
-
- _, err := s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- NamespaceKey: s.namespace,
- FlagKey: "foo",
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- assert.EqualError(t, err, fmt.Sprintf("flag \"%s/foo\" not found", s.namespace))
-}
-
-func (s *DBTestSuite) TestCreateVariant_DuplicateKey() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- variant, err := s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- FlagKey: flag.Key,
- Key: "foo",
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, variant)
-
- // try to create another variant with the same name for this flag
- _, err = s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- FlagKey: flag.Key,
- Key: "foo",
- Name: "foo",
- Description: "bar",
- })
-
- assert.EqualError(t, err, "variant \"foo\" is not unique for flag \"default/TestDBTestSuite/TestCreateVariant_DuplicateKey\"")
-}
-
-func (s *DBTestSuite) TestCreateVariantNamespace_DuplicateKey() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- variant, err := s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- NamespaceKey: s.namespace,
- FlagKey: flag.Key,
- Key: "foo",
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, variant)
-
- // try to create another variant with the same name for this flag
- _, err = s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- NamespaceKey: s.namespace,
- FlagKey: flag.Key,
- Key: "foo",
- Name: "foo",
- Description: "bar",
- })
-
- assert.EqualError(t, err, fmt.Sprintf("variant \"foo\" is not unique for flag \"%s/%s\"", s.namespace, t.Name()))
-}
-
-func (s *DBTestSuite) TestCreateVariant_DuplicateKey_DifferentFlag() {
- t := s.T()
-
- flag1, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: fmt.Sprintf("%s_1", t.Name()),
- Name: "foo_1",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag1)
-
- variant1, err := s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- FlagKey: flag1.Key,
- Key: "foo",
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, variant1)
-
- assert.NotZero(t, variant1.Id)
- assert.Equal(t, flag1.Key, variant1.FlagKey)
- assert.Equal(t, "foo", variant1.Key)
-
- flag2, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: fmt.Sprintf("%s_2", t.Name()),
- Name: "foo_2",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag2)
-
- variant2, err := s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- FlagKey: flag2.Key,
- Key: "foo",
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, variant2)
-
- assert.NotZero(t, variant2.Id)
- assert.Equal(t, flag2.Key, variant2.FlagKey)
- assert.Equal(t, "foo", variant2.Key)
-}
-
-func (s *DBTestSuite) TestCreateVariantNamespace_DuplicateFlag_DuplicateKey() {
- t := s.T()
-
- flag1, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag1)
-
- variant1, err := s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- NamespaceKey: s.namespace,
- FlagKey: flag1.Key,
- Key: "foo",
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, variant1)
-
- assert.NotZero(t, variant1.Id)
- assert.Equal(t, s.namespace, variant1.NamespaceKey)
- assert.Equal(t, flag1.Key, variant1.FlagKey)
- assert.Equal(t, "foo", variant1.Key)
-
- flag2, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag2)
-
- variant2, err := s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- FlagKey: flag2.Key,
- Key: "foo",
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, variant2)
-
- assert.NotZero(t, variant2.Id)
- assert.Equal(t, storage.DefaultNamespace, variant2.NamespaceKey)
- assert.Equal(t, flag2.Key, variant2.FlagKey)
- assert.Equal(t, "foo", variant2.Key)
-}
-
-func (s *DBTestSuite) TestGetFlagWithVariantsMultiNamespace() {
- t := s.T()
-
- for _, namespace := range []string{storage.DefaultNamespace, s.namespace} {
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- NamespaceKey: namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- attachment := `{"key":"value"}`
- variant, err := s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- NamespaceKey: namespace,
- FlagKey: flag.Key,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Attachment: attachment,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, variant)
-
- assert.NotZero(t, variant.Id)
- assert.Equal(t, namespace, variant.NamespaceKey)
- assert.Equal(t, flag.Key, variant.FlagKey)
- assert.Equal(t, t.Name(), variant.Key)
- assert.Equal(t, "foo", variant.Name)
- assert.Equal(t, "bar", variant.Description)
- assert.Equal(t, attachment, variant.Attachment)
- assert.NotZero(t, variant.CreatedAt)
- assert.Equal(t, variant.CreatedAt.Seconds, variant.UpdatedAt.Seconds)
- }
-
- // get the default namespaced flag
- flag, err := s.store.GetFlag(context.TODO(), storage.NewResource(storage.DefaultNamespace, t.Name()))
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- assert.Len(t, flag.Variants, 1)
-
- variant := flag.Variants[0]
- assert.NotZero(t, variant.Id)
- assert.Equal(t, storage.DefaultNamespace, variant.NamespaceKey)
- assert.Equal(t, flag.Key, variant.FlagKey)
- assert.Equal(t, t.Name(), variant.Key)
- assert.Equal(t, "foo", variant.Name)
- assert.Equal(t, "bar", variant.Description)
- assert.Equal(t, `{"key":"value"}`, variant.Attachment)
- assert.NotZero(t, variant.CreatedAt)
- assert.Equal(t, variant.CreatedAt.Seconds, variant.UpdatedAt.Seconds)
-}
-
-func (s *DBTestSuite) TestUpdateVariant() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- attachment1 := `{"key":"value1"}`
- variant, err := s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- FlagKey: flag.Key,
- Key: "foo",
- Name: "foo",
- Description: "bar",
- Attachment: attachment1,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, variant)
-
- assert.NotZero(t, variant.Id)
- assert.Equal(t, storage.DefaultNamespace, variant.NamespaceKey)
- assert.Equal(t, flag.Key, variant.FlagKey)
- assert.Equal(t, "foo", variant.Key)
- assert.Equal(t, "foo", variant.Name)
- assert.Equal(t, "bar", variant.Description)
- assert.Equal(t, attachment1, variant.Attachment)
- assert.NotZero(t, variant.CreatedAt)
- assert.Equal(t, variant.CreatedAt.Seconds, variant.UpdatedAt.Seconds)
-
- updated, err := s.store.UpdateVariant(context.TODO(), &flipt.UpdateVariantRequest{
- Id: variant.Id,
- FlagKey: variant.FlagKey,
- Key: variant.Key,
- Name: variant.Name,
- Description: "foobar",
- Attachment: `{"key": "value2"}`,
- })
-
- require.NoError(t, err)
-
- assert.Equal(t, variant.Id, updated.Id)
- assert.Equal(t, storage.DefaultNamespace, updated.NamespaceKey)
- assert.Equal(t, variant.FlagKey, updated.FlagKey)
- assert.Equal(t, variant.Key, updated.Key)
- assert.Equal(t, variant.Name, updated.Name)
- assert.Equal(t, "foobar", updated.Description)
- assert.Equal(t, `{"key":"value2"}`, updated.Attachment)
- assert.NotZero(t, updated.CreatedAt)
- assert.NotZero(t, updated.UpdatedAt)
-
- // get the flag again
- flag, err = s.store.GetFlag(context.TODO(), storage.NewResource(storage.DefaultNamespace, flag.Key))
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- assert.Len(t, flag.Variants, 1)
-}
-
-func (s *DBTestSuite) TestUpdateVariantNamespace() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- attachment1 := `{"key":"value1"}`
- variant, err := s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- NamespaceKey: s.namespace,
- FlagKey: flag.Key,
- Key: "foo",
- Name: "foo",
- Description: "bar",
- Attachment: attachment1,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, variant)
-
- assert.NotZero(t, variant.Id)
- assert.Equal(t, s.namespace, variant.NamespaceKey)
- assert.Equal(t, flag.Key, variant.FlagKey)
- assert.Equal(t, "foo", variant.Key)
- assert.Equal(t, "foo", variant.Name)
- assert.Equal(t, "bar", variant.Description)
- assert.Equal(t, attachment1, variant.Attachment)
- assert.NotZero(t, variant.CreatedAt)
- assert.Equal(t, variant.CreatedAt.Seconds, variant.UpdatedAt.Seconds)
-
- updated, err := s.store.UpdateVariant(context.TODO(), &flipt.UpdateVariantRequest{
- NamespaceKey: s.namespace,
- Id: variant.Id,
- FlagKey: variant.FlagKey,
- Key: variant.Key,
- Name: variant.Name,
- Description: "foobar",
- Attachment: `{"key": "value2"}`,
- })
-
- require.NoError(t, err)
-
- assert.Equal(t, variant.Id, updated.Id)
- assert.Equal(t, s.namespace, updated.NamespaceKey)
- assert.Equal(t, variant.FlagKey, updated.FlagKey)
- assert.Equal(t, variant.Key, updated.Key)
- assert.Equal(t, variant.Name, updated.Name)
- assert.Equal(t, "foobar", updated.Description)
- assert.Equal(t, `{"key":"value2"}`, updated.Attachment)
- assert.NotZero(t, updated.CreatedAt)
- assert.NotZero(t, updated.UpdatedAt)
-
- // get the flag again
- flag, err = s.store.GetFlag(context.TODO(), storage.NewResource(s.namespace, flag.Key))
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- assert.Len(t, flag.Variants, 1)
-}
-
-func (s *DBTestSuite) TestUpdateVariant_NotFound() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- _, err = s.store.UpdateVariant(context.TODO(), &flipt.UpdateVariantRequest{
- Id: "foo",
- FlagKey: flag.Key,
- Key: "foo",
- Name: "foo",
- Description: "bar",
- })
-
- assert.EqualError(t, err, "variant \"foo\" not found")
-}
-
-func (s *DBTestSuite) TestUpdateVariantNamespace_NotFound() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- _, err = s.store.UpdateVariant(context.TODO(), &flipt.UpdateVariantRequest{
- NamespaceKey: s.namespace,
- Id: "foo",
- FlagKey: flag.Key,
- Key: "foo",
- Name: "foo",
- Description: "bar",
- })
-
- assert.EqualError(t, err, "variant \"foo\" not found")
-}
-
-func (s *DBTestSuite) TestUpdateVariant_DuplicateKey() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- variant1, err := s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- FlagKey: flag.Key,
- Key: "foo",
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, variant1)
-
- variant2, err := s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- FlagKey: flag.Key,
- Key: "bar",
- Name: "bar",
- Description: "baz",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, variant2)
-
- _, err = s.store.UpdateVariant(context.TODO(), &flipt.UpdateVariantRequest{
- Id: variant2.Id,
- FlagKey: variant2.FlagKey,
- Key: variant1.Key,
- Name: variant2.Name,
- Description: "foobar",
- })
-
- assert.EqualError(t, err, "variant \"foo\" is not unique for flag \"default/TestDBTestSuite/TestUpdateVariant_DuplicateKey\"")
-}
-
-func (s *DBTestSuite) TestUpdateVariantNamespace_DuplicateKey() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- variant1, err := s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- NamespaceKey: s.namespace,
- FlagKey: flag.Key,
- Key: "foo",
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, variant1)
-
- variant2, err := s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- NamespaceKey: s.namespace,
- FlagKey: flag.Key,
- Key: "bar",
- Name: "bar",
- Description: "baz",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, variant2)
-
- _, err = s.store.UpdateVariant(context.TODO(), &flipt.UpdateVariantRequest{
- NamespaceKey: s.namespace,
- Id: variant2.Id,
- FlagKey: variant2.FlagKey,
- Key: variant1.Key,
- Name: variant2.Name,
- Description: "foobar",
- })
-
- assert.EqualError(t, err, fmt.Sprintf("variant \"foo\" is not unique for flag \"%s/%s\"", s.namespace, t.Name()))
-}
-
-func (s *DBTestSuite) TestDeleteVariant() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- variant, err := s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- FlagKey: flag.Key,
- Key: "foo",
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, variant)
-
- err = s.store.DeleteVariant(context.TODO(), &flipt.DeleteVariantRequest{FlagKey: variant.FlagKey, Id: variant.Id})
- require.NoError(t, err)
-
- // get the flag again
- flag, err = s.store.GetFlag(context.TODO(), storage.NewResource(storage.DefaultNamespace, flag.Key))
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- assert.Empty(t, flag.Variants)
-}
-
-func (s *DBTestSuite) TestDeleteVariantNamespace() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- variant, err := s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- NamespaceKey: s.namespace,
- FlagKey: flag.Key,
- Key: "foo",
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, variant)
-
- err = s.store.DeleteVariant(context.TODO(), &flipt.DeleteVariantRequest{
- NamespaceKey: s.namespace,
- FlagKey: variant.FlagKey,
- Id: variant.Id,
- })
- require.NoError(t, err)
-
- // get the flag again
- flag, err = s.store.GetFlag(context.TODO(), storage.NewResource(s.namespace, flag.Key))
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- assert.Empty(t, flag.Variants)
-}
-
-func (s *DBTestSuite) TestDeleteVariant_ExistingRule() {
- t := s.T()
-
- // TODO
- t.SkipNow()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- variant, err := s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- FlagKey: flag.Key,
- Key: "foo",
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, variant)
-
- segment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, segment)
-
- rule, err := s.store.CreateRule(context.TODO(), &flipt.CreateRuleRequest{
- FlagKey: flag.Key,
- SegmentKey: segment.Key,
- Rank: 1,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, rule)
-
- // try to delete variant with attached rule
- err = s.store.DeleteVariant(context.TODO(), &flipt.DeleteVariantRequest{
- Id: variant.Id,
- FlagKey: flag.Key,
- })
-
- require.EqualError(t, err, "atleast one rule exists that includes this variant")
-
- // delete the rule, then try to delete the variant again
- err = s.store.DeleteRule(context.TODO(), &flipt.DeleteRuleRequest{
- Id: rule.Id,
- FlagKey: flag.Key,
- })
-
- require.NoError(t, err)
-
- err = s.store.DeleteVariant(context.TODO(), &flipt.DeleteVariantRequest{
- Id: variant.Id,
- FlagKey: flag.Key,
- })
-
- require.NoError(t, err)
-}
-
-func (s *DBTestSuite) TestDeleteVariant_NotFound() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- err = s.store.DeleteVariant(context.TODO(), &flipt.DeleteVariantRequest{
- Id: "foo",
- FlagKey: flag.Key,
- })
-
- require.NoError(t, err)
-}
-
-func (s *DBTestSuite) TestDeleteVariantNamespace_NotFound() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- err = s.store.DeleteVariant(context.TODO(), &flipt.DeleteVariantRequest{
- NamespaceKey: s.namespace,
- Id: "foo",
- FlagKey: flag.Key,
- })
-
- require.NoError(t, err)
-}
-
-func (s *DBTestSuite) TestDeleteVariant_DefaultVariant() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- variant, err := s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- FlagKey: flag.Key,
- Key: "foo",
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, variant)
-
- _, err = s.store.UpdateFlag(context.TODO(), &flipt.UpdateFlagRequest{
- Key: flag.Key,
- Name: flag.Name,
- Description: flag.Description,
- Enabled: true,
- DefaultVariantId: variant.Id,
- })
-
- require.NoError(t, err)
-
- err = s.store.DeleteVariant(context.TODO(), &flipt.DeleteVariantRequest{FlagKey: variant.FlagKey, Id: variant.Id})
- require.NoError(t, err)
-
- // get the flag again
- flag, err = s.store.GetFlag(context.TODO(), storage.NewResource(storage.DefaultNamespace, flag.Key))
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- assert.Empty(t, flag.Variants)
- assert.Nil(t, flag.DefaultVariant)
-}
-
-func BenchmarkListFlags(b *testing.B) {
- s := new(DBTestSuite)
- t := &testing.T{}
- s.SetT(t)
- s.SetupSuite()
-
- for i := 0; i < 1000; i++ {
- reqs := []*flipt.CreateFlagRequest{
- {
- Key: uuid.NewString(),
- Name: fmt.Sprintf("foo_%d", i),
- Enabled: true,
- },
- }
-
- for _, req := range reqs {
- f, err := s.store.CreateFlag(context.TODO(), req)
- require.NoError(t, err)
- assert.NotNil(t, f)
-
- for j := 0; j < 10; j++ {
- v, err := s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- FlagKey: f.Key,
- Key: uuid.NewString(),
- Name: fmt.Sprintf("variant_%d", j),
- })
-
- require.NoError(t, err)
- assert.NotNil(t, v)
- }
- }
- }
-
- b.ResetTimer()
-
- req := storage.ListWithOptions(storage.NewNamespace(storage.DefaultNamespace))
- b.Run("no-pagination", func(b *testing.B) {
- for i := 0; i < b.N; i++ {
- flags, err := s.store.ListFlags(context.TODO(), req)
- require.NoError(t, err)
- assert.NotEmpty(t, flags)
- }
- })
-
- for _, pageSize := range []uint64{10, 25, 100, 500} {
- req := req
- req.QueryParams.Limit = pageSize
- b.Run(fmt.Sprintf("pagination-limit-%d", pageSize), func(b *testing.B) {
- for i := 0; i < b.N; i++ {
- flags, err := s.store.ListFlags(context.TODO(), req)
- require.NoError(t, err)
- assert.NotEmpty(t, flags)
- }
- })
- }
-
- b.Run("pagination", func(b *testing.B) {
- req := req
- req.QueryParams.Limit = 500
- req.QueryParams.Offset = 50
- req.QueryParams.Order = storage.OrderDesc
- for i := 0; i < b.N; i++ {
- flags, err := s.store.ListFlags(context.TODO(), req)
- require.NoError(t, err)
- assert.NotEmpty(t, flags)
- }
- })
-
- s.TearDownSuite()
-}
diff --git a/internal/storage/sql/migrator.go b/internal/storage/sql/migrator.go
index cc50ed1427..736850dc54 100644
--- a/internal/storage/sql/migrator.go
+++ b/internal/storage/sql/migrator.go
@@ -8,23 +8,14 @@ import (
"github.com/golang-migrate/migrate/v4"
"github.com/golang-migrate/migrate/v4/database"
clickhouseMigrate "github.com/golang-migrate/migrate/v4/database/clickhouse"
- "github.com/golang-migrate/migrate/v4/database/cockroachdb"
- "github.com/golang-migrate/migrate/v4/database/mysql"
- "github.com/golang-migrate/migrate/v4/database/pgx/v5"
- "github.com/golang-migrate/migrate/v4/database/sqlite3"
"github.com/golang-migrate/migrate/v4/source/iofs"
- "go.flipt.io/flipt/config/migrations"
"go.flipt.io/flipt/internal/config"
+ "go.flipt.io/flipt/internal/migrations"
"go.uber.org/zap"
)
var expectedVersions = map[Driver]uint{
- SQLite: 15,
- LibSQL: 15, // libsql driver uses the same migrations as sqlite3
- Postgres: 16,
- MySQL: 15,
- CockroachDB: 13,
- Clickhouse: 3,
+ Clickhouse: 3,
}
// Migrator is responsible for migrating the database schema
@@ -35,35 +26,6 @@ type Migrator struct {
migrator *migrate.Migrate
}
-// NewMigrator creates a new Migrator
-func NewMigrator(cfg config.Config, logger *zap.Logger) (*Migrator, error) {
- sql, driver, err := open(cfg, Options{migrate: true})
- if err != nil {
- return nil, fmt.Errorf("opening db: %w", err)
- }
-
- var dr database.Driver
-
- switch driver {
- case SQLite, LibSQL:
- dr, err = sqlite3.WithInstance(sql, &sqlite3.Config{})
- case Postgres:
- dr, err = pgx.WithInstance(sql, &pgx.Config{})
- case CockroachDB:
- dr, err = cockroachdb.WithInstance(sql, &cockroachdb.Config{})
- case MySQL:
- dr, err = mysql.WithInstance(sql, &mysql.Config{})
- }
-
- if err != nil {
- return nil, fmt.Errorf("getting db driver for: %s: %w", driver, err)
- }
-
- logger.Debug("using driver", zap.String("driver", driver.String()))
-
- return migratorHelper(logger, sql, driver, dr)
-}
-
func migratorHelper(logger *zap.Logger, db *sql.DB, driver Driver, databaseDriver database.Driver) (*Migrator, error) {
// source migrations from embedded config/migrations package
// relative to the specific driver
@@ -168,18 +130,6 @@ func (m *Migrator) Up(force bool) error {
func (m *Migrator) Drop() error {
m.logger.Debug("running drop ...")
- switch m.driver {
- case SQLite:
- // disable foreign keys for sqlite to avoid errors when dropping tables
- // https://www.sqlite.org/foreignkeys.html#fk_enable
- // we dont need to worry about re-enabling them since we're dropping the db
- // and the connection will be closed
- _, _ = m.db.Exec("PRAGMA foreign_keys = OFF")
- case MySQL:
- // https://stackoverflow.com/questions/5452760/how-to-truncate-a-foreign-key-constrained-table
- _, _ = m.db.Exec("SET FOREIGN_KEY_CHECKS = 0;")
- }
-
if err := m.migrator.Drop(); err != nil {
return fmt.Errorf("dropping: %w", err)
}
diff --git a/internal/storage/sql/mock_pg_driver.go b/internal/storage/sql/mock_pg_driver.go
deleted file mode 100644
index 0a35e8700b..0000000000
--- a/internal/storage/sql/mock_pg_driver.go
+++ /dev/null
@@ -1,133 +0,0 @@
-// Code generated by mockery v2.42.1. DO NOT EDIT.
-
-package sql
-
-import (
- "context"
- "database/sql/driver"
- "errors"
-
- "github.com/stretchr/testify/mock"
-)
-
-// MockConnector is an autogenerated mock type for the Connector type
-type mockConnector struct {
- mock.Mock
-}
-
-// Connect provides a mock function with given fields: _a0
-func (_m *mockConnector) Connect(_a0 context.Context) (driver.Conn, error) {
- ret := _m.Called(_a0)
-
- if len(ret) == 0 {
- panic("no return value specified for Connect")
- }
-
- var r0 driver.Conn
- var r1 error
- if rf, ok := ret.Get(0).(func(context.Context) (driver.Conn, error)); ok {
- return rf(_a0)
- }
- if rf, ok := ret.Get(0).(func(context.Context) driver.Conn); ok {
- r0 = rf(_a0)
- } else {
- r0 = ret.Get(0).(driver.Conn)
- }
-
- if rf, ok := ret.Get(1).(func(context.Context) error); ok {
- r1 = rf(_a0)
- } else {
- r1 = ret.Error(1)
- }
-
- return r0, r1
-}
-
-// Driver provides a mock function with given fields:
-func (_m *mockConnector) Driver() driver.Driver {
- ret := _m.Called()
-
- if len(ret) == 0 {
- panic("no return value specified for Driver")
- }
-
- var r0 driver.Driver
- if rf, ok := ret.Get(0).(func() driver.Driver); ok {
- r0 = rf()
- } else {
- r0 = ret.Get(0).(driver.Driver)
- }
-
- return r0
-}
-
-// newMockConnector creates a new instance of MockConnector. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
-// The first argument is typically a *testing.T value.
-func newMockConnector(t interface {
- mock.TestingT
- Cleanup(func())
-}) *mockConnector {
- mock := &mockConnector{}
- mock.Mock.Test(t)
-
- t.Cleanup(func() { mock.AssertExpectations(t) })
-
- return mock
-}
-
-type mockDriverConn struct{}
-
-func (*mockDriverConn) Prepare(query string) (driver.Stmt, error) {
- return nil, errors.New("unexpected call")
-}
-func (*mockDriverConn) Close() error { return nil }
-func (*mockDriverConn) Begin() (driver.Tx, error) { return nil, errors.New("unexpected call") }
-
-// MockDriverContext is an autogenerated mock type for the DriverContext type
-type MockDriverContext struct {
- mock.Mock
-}
-
-// OpenConnector provides a mock function with given fields: name
-func (_m *MockDriverContext) OpenConnector(name string) (driver.Connector, error) {
- ret := _m.Called(name)
-
- if len(ret) == 0 {
- panic("no return value specified for OpenConnector")
- }
-
- var r0 driver.Connector
- var r1 error
- if rf, ok := ret.Get(0).(func(string) (driver.Connector, error)); ok {
- return rf(name)
- }
- if rf, ok := ret.Get(0).(func(string) driver.Connector); ok {
- r0 = rf(name)
- } else {
- if ret.Get(0) != nil {
- r0 = ret.Get(0).(driver.Connector)
- }
- }
-
- if rf, ok := ret.Get(1).(func(string) error); ok {
- r1 = rf(name)
- } else {
- r1 = ret.Error(1)
- }
-
- return r0, r1
-}
-
-// NewMockDriverContext creates a new instance of MockDriverContext. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
-// The first argument is typically a *testing.T value.
-func NewMockDriverContext(t interface {
- mock.TestingT
- Cleanup(func())
-}) *MockDriverContext {
- mock := &MockDriverContext{}
- mock.Mock.Test(t)
-
- t.Cleanup(func() { mock.AssertExpectations(t) })
-
- return mock
-}
diff --git a/internal/storage/sql/mysql/mysql.go b/internal/storage/sql/mysql/mysql.go
deleted file mode 100644
index 2699235184..0000000000
--- a/internal/storage/sql/mysql/mysql.go
+++ /dev/null
@@ -1,238 +0,0 @@
-package mysql
-
-import (
- "context"
- "database/sql"
- "errors"
-
- sq "github.com/Masterminds/squirrel"
- "github.com/go-sql-driver/mysql"
- errs "go.flipt.io/flipt/errors"
- "go.flipt.io/flipt/internal/storage"
- "go.flipt.io/flipt/internal/storage/sql/common"
- flipt "go.flipt.io/flipt/rpc/flipt"
- "go.uber.org/zap"
-)
-
-const (
- constraintForeignKeyErr uint16 = 1452
- constraintUniqueErr uint16 = 1062
-)
-
-var _ storage.Store = &Store{}
-
-func NewStore(db *sql.DB, builder sq.StatementBuilderType, logger *zap.Logger) *Store {
- return &Store{
- Store: common.NewStore(db, builder, logger),
- }
-}
-
-type Store struct {
- *common.Store
-}
-
-func (s *Store) String() string {
- return "mysql"
-}
-
-func (s *Store) CreateNamespace(ctx context.Context, r *flipt.CreateNamespaceRequest) (*flipt.Namespace, error) {
- namespace, err := s.Store.CreateNamespace(ctx, r)
- if err != nil {
- var merr *mysql.MySQLError
-
- if errors.As(err, &merr) && merr.Number == constraintUniqueErr {
- return nil, errs.ErrInvalidf("namespace %q is not unique", r.Key)
- }
-
- return nil, err
- }
-
- return namespace, nil
-}
-
-func (s *Store) CreateFlag(ctx context.Context, r *flipt.CreateFlagRequest) (*flipt.Flag, error) {
- flag, err := s.Store.CreateFlag(ctx, r)
- if err != nil {
- var merr *mysql.MySQLError
-
- if errors.As(err, &merr) {
- switch merr.Number {
- case constraintForeignKeyErr:
- return nil, errs.ErrNotFoundf("namespace %q", r.NamespaceKey)
- case constraintUniqueErr:
- return nil, errs.ErrInvalidf(`flag "%s/%s" is not unique`, r.NamespaceKey, r.Key)
- }
- }
-
- return nil, err
- }
-
- return flag, nil
-}
-
-func (s *Store) UpdateFlag(ctx context.Context, r *flipt.UpdateFlagRequest) (*flipt.Flag, error) {
- flag, err := s.Store.UpdateFlag(ctx, r)
- if err != nil {
- var merr *mysql.MySQLError
-
- if errors.As(err, &merr) && merr.Number == constraintForeignKeyErr {
- if r.DefaultVariantId != "" {
- return nil, errs.ErrInvalidf(`variant %q not found for flag "%s/%s"`, r.DefaultVariantId, r.NamespaceKey, r.Key)
- }
-
- return nil, errs.ErrInvalidf(`flag "%s/%s" not found`, r.NamespaceKey, r.Key)
- }
-
- return nil, err
- }
-
- return flag, nil
-}
-
-func (s *Store) CreateVariant(ctx context.Context, r *flipt.CreateVariantRequest) (*flipt.Variant, error) {
- variant, err := s.Store.CreateVariant(ctx, r)
- if err != nil {
- var merr *mysql.MySQLError
-
- if errors.As(err, &merr) {
- switch merr.Number {
- case constraintForeignKeyErr:
- return nil, errs.ErrNotFoundf(`flag "%s/%s"`, r.NamespaceKey, r.FlagKey)
- case constraintUniqueErr:
- return nil, errs.ErrInvalidf(`variant %q is not unique for flag "%s/%s"`, r.Key, r.NamespaceKey, r.FlagKey)
- }
- }
-
- return nil, err
- }
-
- return variant, nil
-}
-
-func (s *Store) UpdateVariant(ctx context.Context, r *flipt.UpdateVariantRequest) (*flipt.Variant, error) {
- variant, err := s.Store.UpdateVariant(ctx, r)
- if err != nil {
- var merr *mysql.MySQLError
-
- if errors.As(err, &merr) && merr.Number == constraintUniqueErr {
- return nil, errs.ErrInvalidf(`variant %q is not unique for flag "%s/%s"`, r.Key, r.NamespaceKey, r.FlagKey)
- }
-
- return nil, err
- }
-
- return variant, nil
-}
-
-func (s *Store) CreateSegment(ctx context.Context, r *flipt.CreateSegmentRequest) (*flipt.Segment, error) {
- segment, err := s.Store.CreateSegment(ctx, r)
- if err != nil {
- var merr *mysql.MySQLError
-
- if errors.As(err, &merr) {
- switch merr.Number {
- case constraintForeignKeyErr:
- return nil, errs.ErrNotFoundf("namespace %q", r.NamespaceKey)
- case constraintUniqueErr:
- return nil, errs.ErrInvalidf(`segment "%s/%s" is not unique`, r.NamespaceKey, r.Key)
- }
- }
-
- return nil, err
- }
-
- return segment, nil
-}
-
-func (s *Store) CreateConstraint(ctx context.Context, r *flipt.CreateConstraintRequest) (*flipt.Constraint, error) {
- constraint, err := s.Store.CreateConstraint(ctx, r)
- if err != nil {
- var merr *mysql.MySQLError
-
- if errors.As(err, &merr) && merr.Number == constraintForeignKeyErr {
- return nil, errs.ErrNotFoundf(`segment "%s/%s"`, r.NamespaceKey, r.SegmentKey)
- }
-
- return nil, err
- }
-
- return constraint, nil
-}
-
-func (s *Store) CreateRollout(ctx context.Context, r *flipt.CreateRolloutRequest) (*flipt.Rollout, error) {
- rollout, err := s.Store.CreateRollout(ctx, r)
- if err != nil {
- var merr *mysql.MySQLError
-
- if errors.As(err, &merr) && merr.Number == constraintForeignKeyErr {
- if segment := r.GetSegment(); segment != nil {
- return nil, errs.ErrNotFoundf(`flag "%s/%s or segment %s"`, r.NamespaceKey, r.FlagKey, segment.SegmentKey)
- }
- return nil, errs.ErrNotFoundf(`flag "%s/%s"`, r.NamespaceKey, r.FlagKey)
- }
-
- return nil, err
- }
-
- return rollout, nil
-}
-
-func (s *Store) CreateRule(ctx context.Context, r *flipt.CreateRuleRequest) (*flipt.Rule, error) {
- rule, err := s.Store.CreateRule(ctx, r)
- if err != nil {
- var merr *mysql.MySQLError
-
- if errors.As(err, &merr) && merr.Number == constraintForeignKeyErr {
- return nil, errs.ErrNotFoundf(`flag "%s/%s" or segment "%s/%s"`, r.NamespaceKey, r.FlagKey, r.NamespaceKey, r.SegmentKey)
- }
-
- return nil, err
- }
-
- return rule, nil
-}
-
-func (s *Store) UpdateRule(ctx context.Context, r *flipt.UpdateRuleRequest) (*flipt.Rule, error) {
- rule, err := s.Store.UpdateRule(ctx, r)
- if err != nil {
- var merr *mysql.MySQLError
-
- if errors.As(err, &merr) && merr.Number == constraintForeignKeyErr {
- return nil, errs.ErrNotFoundf(`rule "%s/%s"`, r.NamespaceKey, r.Id)
- }
-
- return nil, err
- }
-
- return rule, nil
-}
-
-func (s *Store) CreateDistribution(ctx context.Context, r *flipt.CreateDistributionRequest) (*flipt.Distribution, error) {
- dist, err := s.Store.CreateDistribution(ctx, r)
- if err != nil {
- var merr *mysql.MySQLError
-
- if errors.As(err, &merr) && merr.Number == constraintForeignKeyErr {
- return nil, errs.ErrNotFoundf("variant %q, rule %q, flag %q in namespace %q", r.VariantId, r.RuleId, r.FlagKey, r.NamespaceKey)
- }
-
- return nil, err
- }
-
- return dist, nil
-}
-
-func (s *Store) DeleteSegment(ctx context.Context, r *flipt.DeleteSegmentRequest) error {
- err := s.Store.DeleteSegment(ctx, r)
- if err != nil {
- var merr *mysql.MySQLError
-
- if errors.As(err, &merr) {
- if merr.Number == constraintForeignKeyErr {
- return errs.ErrInvalidf(`segment "%s/%s" is in use`, r.NamespaceKey, r.Key)
- }
- }
- }
-
- return err
-}
diff --git a/internal/storage/sql/namespaces_test.go b/internal/storage/sql/namespaces_test.go
deleted file mode 100644
index 9fc42a805e..0000000000
--- a/internal/storage/sql/namespaces_test.go
+++ /dev/null
@@ -1,390 +0,0 @@
-package sql_test
-
-import (
- "context"
- "encoding/base64"
- "encoding/json"
-
- "github.com/google/uuid"
- "github.com/stretchr/testify/assert"
- "github.com/stretchr/testify/require"
- "go.flipt.io/flipt/internal/storage"
- "go.flipt.io/flipt/internal/storage/sql/common"
- flipt "go.flipt.io/flipt/rpc/flipt"
-)
-
-func (s *DBTestSuite) TestGetNamespace() {
- t := s.T()
-
- ns, err := s.store.CreateNamespace(context.TODO(), &flipt.CreateNamespaceRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, ns)
-
- got, err := s.store.GetNamespace(context.TODO(), storage.NewNamespace(ns.Key))
-
- require.NoError(t, err)
- assert.NotNil(t, got)
-
- assert.Equal(t, ns.Key, got.Key)
- assert.Equal(t, ns.Name, got.Name)
- assert.Equal(t, ns.Description, got.Description)
- assert.NotZero(t, ns.CreatedAt)
- assert.NotZero(t, ns.UpdatedAt)
-}
-
-func (s *DBTestSuite) TestGetNamespaceNotFound() {
- t := s.T()
-
- _, err := s.store.GetNamespace(context.TODO(), storage.NewNamespace("foo"))
- assert.EqualError(t, err, "namespace \"foo\" not found")
-}
-
-func (s *DBTestSuite) TestListNamespaces() {
- t := s.T()
-
- reqs := []*flipt.CreateNamespaceRequest{
- {
- Key: uuid.NewString(),
- Name: "foo",
- Description: "bar",
- },
- {
- Key: uuid.NewString(),
- Name: "foo",
- Description: "bar",
- },
- }
-
- for _, req := range reqs {
- _, err := s.store.CreateNamespace(context.TODO(), req)
- require.NoError(t, err)
- }
-
- _, err := s.store.ListNamespaces(context.TODO(), storage.ListWithOptions(
- storage.ReferenceRequest{},
- storage.ListWithQueryParamOptions[storage.ReferenceRequest](
- storage.WithPageToken("Hello World"),
- ),
- ))
- require.EqualError(t, err, "pageToken is not valid: \"Hello World\"")
-
- res, err := s.store.ListNamespaces(context.TODO(), storage.ListWithOptions(
- storage.ReferenceRequest{},
- ))
- require.NoError(t, err)
-
- got := res.Results
- assert.NotEmpty(t, got)
-
- for _, ns := range got {
- assert.NotZero(t, ns.CreatedAt)
- assert.NotZero(t, ns.UpdatedAt)
- }
-}
-
-func (s *DBTestSuite) TestListNamespacesPagination_LimitOffset() {
- t := s.T()
-
- reqs := []*flipt.CreateNamespaceRequest{
- {
- Key: uuid.NewString(),
- Name: "foo",
- Description: "bar",
- },
- {
- Key: uuid.NewString(),
- Name: "foo",
- Description: "bar",
- },
- {
- Key: uuid.NewString(),
- Name: "foo",
- Description: "bar",
- },
- }
-
- for _, req := range reqs {
- _, err := s.store.CreateNamespace(context.TODO(), req)
- require.NoError(t, err)
- }
-
- oldest, middle, newest := reqs[0], reqs[1], reqs[2]
-
- // TODO: the ordering (DESC) is required because the default ordering is ASC and we are not clearing the DB between tests
- // get middle namespace
- res, err := s.store.ListNamespaces(context.TODO(),
- storage.ListWithOptions(
- storage.ReferenceRequest{},
- storage.ListWithQueryParamOptions[storage.ReferenceRequest](
- storage.WithOrder(storage.OrderDesc), storage.WithLimit(1), storage.WithOffset(1)),
- ),
- )
-
- require.NoError(t, err)
-
- got := res.Results
- assert.Len(t, got, 1)
-
- assert.Equal(t, middle.Key, got[0].Key)
-
- // get first (newest) namespace
- res, err = s.store.ListNamespaces(context.TODO(),
- storage.ListWithOptions(
- storage.ReferenceRequest{},
- storage.ListWithQueryParamOptions[storage.ReferenceRequest](
- storage.WithOrder(storage.OrderDesc), storage.WithLimit(1)),
- ),
- )
-
- require.NoError(t, err)
-
- got = res.Results
- assert.Len(t, got, 1)
-
- assert.Equal(t, newest.Key, got[0].Key)
-
- // get last (oldest) namespace
- res, err = s.store.ListNamespaces(context.TODO(),
- storage.ListWithOptions(
- storage.ReferenceRequest{},
- storage.ListWithQueryParamOptions[storage.ReferenceRequest](
- storage.WithOrder(storage.OrderDesc), storage.WithLimit(1), storage.WithOffset(2)),
- ),
- )
- require.NoError(t, err)
-
- got = res.Results
- assert.Len(t, got, 1)
-
- assert.Equal(t, oldest.Key, got[0].Key)
-
- // get all namespaces
- res, err = s.store.ListNamespaces(context.TODO(),
- storage.ListWithOptions(
- storage.ReferenceRequest{},
- storage.ListWithQueryParamOptions[storage.ReferenceRequest](
- storage.WithOrder(storage.OrderDesc),
- ),
- ),
- )
- require.NoError(t, err)
-
- got = res.Results
-
- assert.Equal(t, newest.Key, got[0].Key)
- assert.Equal(t, middle.Key, got[1].Key)
- assert.Equal(t, oldest.Key, got[2].Key)
-}
-
-func (s *DBTestSuite) TestListNamespacesPagination_LimitWithNextPage() {
- t := s.T()
-
- reqs := []*flipt.CreateNamespaceRequest{
- {
- Key: uuid.NewString(),
- Name: "foo",
- Description: "bar",
- },
- {
- Key: uuid.NewString(),
- Name: "foo",
- Description: "bar",
- },
- {
- Key: uuid.NewString(),
- Name: "foo",
- Description: "bar",
- },
- }
-
- for _, req := range reqs {
- _, err := s.store.CreateNamespace(context.TODO(), req)
- require.NoError(t, err)
- }
-
- oldest, middle, newest := reqs[0], reqs[1], reqs[2]
-
- // TODO: the ordering (DESC) is required because the default ordering is ASC and we are not clearing the DB between tests
- // get newest namespace
- opts := []storage.QueryOption{storage.WithOrder(storage.OrderDesc), storage.WithLimit(1)}
-
- res, err := s.store.ListNamespaces(context.TODO(), storage.ListWithOptions(
- storage.ReferenceRequest{},
- storage.ListWithQueryParamOptions[storage.ReferenceRequest](opts...),
- ))
- require.NoError(t, err)
-
- got := res.Results
- assert.Len(t, got, 1)
- assert.Equal(t, newest.Key, got[0].Key)
- assert.NotEmpty(t, res.NextPageToken)
-
- pTokenB, err := base64.StdEncoding.DecodeString(res.NextPageToken)
- require.NoError(t, err)
-
- pageToken := &common.PageToken{}
- err = json.Unmarshal(pTokenB, pageToken)
- require.NoError(t, err)
- // next page should be the middle namespace
- assert.Equal(t, middle.Key, pageToken.Key)
- assert.NotZero(t, pageToken.Offset)
-
- opts = append(opts, storage.WithPageToken(res.NextPageToken))
-
- // get middle namespace
- res, err = s.store.ListNamespaces(context.TODO(), storage.ListWithOptions(
- storage.ReferenceRequest{},
- storage.ListWithQueryParamOptions[storage.ReferenceRequest](opts...),
- ))
- require.NoError(t, err)
-
- got = res.Results
- assert.Len(t, got, 1)
- assert.Equal(t, middle.Key, got[0].Key)
-
- pTokenB, err = base64.StdEncoding.DecodeString(res.NextPageToken)
- require.NoError(t, err)
-
- err = json.Unmarshal(pTokenB, pageToken)
- require.NoError(t, err)
- // next page should be the oldest namespace
- assert.Equal(t, oldest.Key, pageToken.Key)
- assert.NotZero(t, pageToken.Offset)
-
- opts = []storage.QueryOption{storage.WithOrder(storage.OrderDesc), storage.WithLimit(1), storage.WithPageToken(res.NextPageToken)}
-
- // get oldest namespace
- res, err = s.store.ListNamespaces(context.TODO(), storage.ListWithOptions(
- storage.ReferenceRequest{},
- storage.ListWithQueryParamOptions[storage.ReferenceRequest](opts...),
- ))
- require.NoError(t, err)
-
- got = res.Results
- assert.Len(t, got, 1)
- assert.Equal(t, oldest.Key, got[0].Key)
-
- opts = []storage.QueryOption{storage.WithOrder(storage.OrderDesc), storage.WithLimit(3)}
- // get all namespaces
- res, err = s.store.ListNamespaces(context.TODO(), storage.ListWithOptions(
- storage.ReferenceRequest{},
- storage.ListWithQueryParamOptions[storage.ReferenceRequest](opts...),
- ))
- require.NoError(t, err)
-
- got = res.Results
- assert.Len(t, got, 3)
- assert.Equal(t, newest.Key, got[0].Key)
- assert.Equal(t, middle.Key, got[1].Key)
- assert.Equal(t, oldest.Key, got[2].Key)
-}
-
-func (s *DBTestSuite) TestCreateNamespace() {
- t := s.T()
-
- ns, err := s.store.CreateNamespace(context.TODO(), &flipt.CreateNamespaceRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
-
- assert.Equal(t, t.Name(), ns.Key)
- assert.Equal(t, "foo", ns.Name)
- assert.Equal(t, "bar", ns.Description)
- assert.NotZero(t, ns.CreatedAt)
- assert.Equal(t, ns.CreatedAt.Seconds, ns.UpdatedAt.Seconds)
-}
-
-func (s *DBTestSuite) TestCreateNamespace_DuplicateKey() {
- t := s.T()
-
- _, err := s.store.CreateNamespace(context.TODO(), &flipt.CreateNamespaceRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
-
- _, err = s.store.CreateNamespace(context.TODO(), &flipt.CreateNamespaceRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- assert.EqualError(t, err, "namespace \"TestDBTestSuite/TestCreateNamespace_DuplicateKey\" is not unique")
-}
-
-func (s *DBTestSuite) TestUpdateNamespace() {
- t := s.T()
-
- ns, err := s.store.CreateNamespace(context.TODO(), &flipt.CreateNamespaceRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
-
- assert.Equal(t, t.Name(), ns.Key)
- assert.Equal(t, "foo", ns.Name)
- assert.Equal(t, "bar", ns.Description)
- assert.NotZero(t, ns.CreatedAt)
- assert.Equal(t, ns.CreatedAt.Seconds, ns.UpdatedAt.Seconds)
-
- updated, err := s.store.UpdateNamespace(context.TODO(), &flipt.UpdateNamespaceRequest{
- Key: ns.Key,
- Name: ns.Name,
- Description: "foobar",
- })
-
- require.NoError(t, err)
-
- assert.Equal(t, ns.Key, updated.Key)
- assert.Equal(t, ns.Name, updated.Name)
- assert.Equal(t, "foobar", updated.Description)
- assert.NotZero(t, updated.CreatedAt)
- assert.NotZero(t, updated.UpdatedAt)
-}
-
-func (s *DBTestSuite) TestUpdateNamespace_NotFound() {
- t := s.T()
-
- _, err := s.store.UpdateNamespace(context.TODO(), &flipt.UpdateNamespaceRequest{
- Key: "foo",
- Name: "foo",
- Description: "bar",
- })
-
- assert.EqualError(t, err, "namespace \"foo\" not found")
-}
-
-func (s *DBTestSuite) TestDeleteNamespace() {
- t := s.T()
-
- ns, err := s.store.CreateNamespace(context.TODO(), &flipt.CreateNamespaceRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, ns)
-
- err = s.store.DeleteNamespace(context.TODO(), &flipt.DeleteNamespaceRequest{Key: ns.Key})
- require.NoError(t, err)
-}
-
-func (s *DBTestSuite) TestDeleteNamespace_NotFound() {
- t := s.T()
-
- err := s.store.DeleteNamespace(context.TODO(), &flipt.DeleteNamespaceRequest{Key: "foo"})
- require.NoError(t, err)
-}
diff --git a/internal/storage/sql/postgres/postgres.go b/internal/storage/sql/postgres/postgres.go
deleted file mode 100644
index 2378f20a98..0000000000
--- a/internal/storage/sql/postgres/postgres.go
+++ /dev/null
@@ -1,235 +0,0 @@
-package postgres
-
-import (
- "context"
- "database/sql"
- "errors"
-
- sq "github.com/Masterminds/squirrel"
- "github.com/jackc/pgx/v5/pgconn"
- errs "go.flipt.io/flipt/errors"
- "go.flipt.io/flipt/internal/storage"
- "go.flipt.io/flipt/internal/storage/sql/common"
- flipt "go.flipt.io/flipt/rpc/flipt"
- "go.uber.org/zap"
-)
-
-const (
- constraintForeignKeyErr = "23503" // "foreign_key_violation"
- constraintUniqueErr = "23505" // "unique_violation"
-)
-
-var _ storage.Store = &Store{}
-
-func NewStore(db *sql.DB, builder sq.StatementBuilderType, logger *zap.Logger) *Store {
- return &Store{
- Store: common.NewStore(db, builder.PlaceholderFormat(sq.Dollar), logger),
- }
-}
-
-type Store struct {
- *common.Store
-}
-
-func (s *Store) String() string {
- return "postgres"
-}
-
-func (s *Store) CreateNamespace(ctx context.Context, r *flipt.CreateNamespaceRequest) (*flipt.Namespace, error) {
- namespace, err := s.Store.CreateNamespace(ctx, r)
- if err != nil {
- var perr *pgconn.PgError
-
- if errors.As(err, &perr) && perr.Code == constraintUniqueErr {
- return nil, errs.ErrInvalidf(`namespace %q is not unique`, r.Key)
- }
-
- return nil, err
- }
-
- return namespace, nil
-}
-
-func (s *Store) CreateFlag(ctx context.Context, r *flipt.CreateFlagRequest) (*flipt.Flag, error) {
- flag, err := s.Store.CreateFlag(ctx, r)
- if err != nil {
- var perr *pgconn.PgError
-
- if errors.As(err, &perr) {
- switch perr.Code {
- case constraintForeignKeyErr:
- return nil, errs.ErrNotFoundf("namespace %q", r.NamespaceKey)
- case constraintUniqueErr:
- return nil, errs.ErrInvalidf(`flag "%s/%s" is not unique`, r.NamespaceKey, r.Key)
- }
- }
-
- return nil, err
- }
-
- return flag, nil
-}
-
-func (s *Store) UpdateFlag(ctx context.Context, r *flipt.UpdateFlagRequest) (*flipt.Flag, error) {
- flag, err := s.Store.UpdateFlag(ctx, r)
- if err != nil {
- var perr *pgconn.PgError
-
- if errors.As(err, &perr) && perr.Code == constraintForeignKeyErr {
- if r.DefaultVariantId != "" {
- return nil, errs.ErrInvalidf(`variant %q not found for flag "%s/%s"`, r.DefaultVariantId, r.NamespaceKey, r.Key)
- }
-
- return nil, errs.ErrInvalidf(`flag "%s/%s" not found`, r.NamespaceKey, r.Key)
- }
-
- return nil, err
- }
-
- return flag, nil
-}
-
-func (s *Store) CreateVariant(ctx context.Context, r *flipt.CreateVariantRequest) (*flipt.Variant, error) {
- variant, err := s.Store.CreateVariant(ctx, r)
- if err != nil {
- var perr *pgconn.PgError
-
- if errors.As(err, &perr) {
- switch perr.Code {
- case constraintForeignKeyErr:
- return nil, errs.ErrNotFoundf(`flag "%s/%s"`, r.NamespaceKey, r.FlagKey)
- case constraintUniqueErr:
- return nil, errs.ErrInvalidf(`variant %q is not unique for flag "%s/%s"`, r.Key, r.NamespaceKey, r.FlagKey)
- }
- }
-
- return nil, err
- }
-
- return variant, nil
-}
-
-func (s *Store) UpdateVariant(ctx context.Context, r *flipt.UpdateVariantRequest) (*flipt.Variant, error) {
- variant, err := s.Store.UpdateVariant(ctx, r)
- if err != nil {
- var perr *pgconn.PgError
-
- if errors.As(err, &perr) && perr.Code == constraintUniqueErr {
- return nil, errs.ErrInvalidf(`variant %q is not unique for flag "%s/%s"`, r.Key, r.NamespaceKey, r.FlagKey)
- }
-
- return nil, err
- }
-
- return variant, nil
-}
-
-func (s *Store) CreateSegment(ctx context.Context, r *flipt.CreateSegmentRequest) (*flipt.Segment, error) {
- segment, err := s.Store.CreateSegment(ctx, r)
- if err != nil {
- var perr *pgconn.PgError
-
- if errors.As(err, &perr) {
- switch perr.Code {
- case constraintForeignKeyErr:
- return nil, errs.ErrNotFoundf("namespace %q", r.NamespaceKey)
- case constraintUniqueErr:
- return nil, errs.ErrInvalidf(`segment "%s/%s" is not unique`, r.NamespaceKey, r.Key)
- }
- }
-
- return nil, err
- }
-
- return segment, nil
-}
-
-func (s *Store) CreateConstraint(ctx context.Context, r *flipt.CreateConstraintRequest) (*flipt.Constraint, error) {
- constraint, err := s.Store.CreateConstraint(ctx, r)
- if err != nil {
- var perr *pgconn.PgError
-
- if errors.As(err, &perr) && perr.Code == constraintForeignKeyErr {
- return nil, errs.ErrNotFoundf(`segment "%s/%s"`, r.NamespaceKey, r.SegmentKey)
- }
-
- return nil, err
- }
-
- return constraint, nil
-}
-
-func (s *Store) CreateRollout(ctx context.Context, r *flipt.CreateRolloutRequest) (*flipt.Rollout, error) {
- rollout, err := s.Store.CreateRollout(ctx, r)
- if err != nil {
- var perr *pgconn.PgError
-
- if errors.As(err, &perr) && perr.Code == constraintForeignKeyErr {
- if segment := r.GetSegment(); segment != nil {
- return nil, errs.ErrNotFoundf(`flag "%s/%s or segment %s"`, r.NamespaceKey, r.FlagKey, segment.SegmentKey)
- }
- return nil, errs.ErrNotFoundf(`flag "%s/%s"`, r.NamespaceKey, r.FlagKey)
- }
-
- return nil, err
- }
-
- return rollout, nil
-}
-
-func (s *Store) CreateRule(ctx context.Context, r *flipt.CreateRuleRequest) (*flipt.Rule, error) {
- rule, err := s.Store.CreateRule(ctx, r)
- if err != nil {
- var perr *pgconn.PgError
-
- if errors.As(err, &perr) && perr.Code == constraintForeignKeyErr {
- return nil, errs.ErrNotFoundf(`flag "%s/%s" or segment "%s/%s"`, r.NamespaceKey, r.FlagKey, r.NamespaceKey, r.SegmentKey)
- }
-
- return nil, err
- }
-
- return rule, nil
-}
-
-func (s *Store) UpdateRule(ctx context.Context, r *flipt.UpdateRuleRequest) (*flipt.Rule, error) {
- rule, err := s.Store.UpdateRule(ctx, r)
- if err != nil {
- var perr *pgconn.PgError
-
- if errors.As(err, &perr) && perr.Code == constraintForeignKeyErr {
- return nil, errs.ErrNotFoundf(`rule "%s/%s"`, r.NamespaceKey, r.Id)
- }
-
- return nil, err
- }
-
- return rule, nil
-}
-
-func (s *Store) CreateDistribution(ctx context.Context, r *flipt.CreateDistributionRequest) (*flipt.Distribution, error) {
- dist, err := s.Store.CreateDistribution(ctx, r)
- if err != nil {
- var perr *pgconn.PgError
-
- if errors.As(err, &perr) && perr.Code == constraintForeignKeyErr {
- return nil, errs.ErrNotFoundf("variant %q, rule %q, flag %q in namespace %q", r.VariantId, r.RuleId, r.FlagKey, r.NamespaceKey)
- }
-
- return nil, err
- }
-
- return dist, nil
-}
-
-func (s *Store) DeleteSegment(ctx context.Context, r *flipt.DeleteSegmentRequest) error {
- err := s.Store.DeleteSegment(ctx, r)
- if err != nil {
- var perr *pgconn.PgError
-
- if errors.As(err, &perr) && perr.Code == constraintForeignKeyErr {
- return errs.ErrInvalidf(`segment "%s/%s" is in use`, r.NamespaceKey, r.Key)
- }
- }
- return err
-}
diff --git a/internal/storage/sql/rollout_test.go b/internal/storage/sql/rollout_test.go
deleted file mode 100644
index 0f6d181b52..0000000000
--- a/internal/storage/sql/rollout_test.go
+++ /dev/null
@@ -1,1168 +0,0 @@
-package sql_test
-
-import (
- "context"
- "encoding/base64"
- "encoding/json"
- "fmt"
- "sort"
-
- "github.com/stretchr/testify/assert"
- "github.com/stretchr/testify/require"
- errs "go.flipt.io/flipt/errors"
- "go.flipt.io/flipt/internal/storage"
- "go.flipt.io/flipt/internal/storage/sql/common"
- flipt "go.flipt.io/flipt/rpc/flipt"
-)
-
-func (s *DBTestSuite) TestGetRollout() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- Type: flipt.FlagType_BOOLEAN_FLAG_TYPE,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- rollout, err := s.store.CreateRollout(context.TODO(), &flipt.CreateRolloutRequest{
- FlagKey: flag.Key,
- Rank: 1,
- Rule: &flipt.CreateRolloutRequest_Threshold{
- Threshold: &flipt.RolloutThreshold{
- Value: true,
- Percentage: 40,
- },
- },
- })
-
- require.NoError(t, err)
- assert.NotNil(t, rollout)
-
- got, err := s.store.GetRollout(context.TODO(), storage.NewNamespace(storage.DefaultNamespace), rollout.Id)
-
- require.NoError(t, err)
- assert.NotNil(t, got)
-
- assert.Equal(t, rollout.Id, got.Id)
- assert.Equal(t, storage.DefaultNamespace, got.NamespaceKey)
- assert.Equal(t, rollout.FlagKey, got.FlagKey)
- assert.Equal(t, rollout.Rank, got.Rank)
- assert.Equal(t, rollout.Type, got.Type)
- assert.Equal(t, rollout.Rule, got.Rule)
- assert.NotZero(t, got.CreatedAt)
- assert.NotZero(t, got.UpdatedAt)
-}
-
-func (s *DBTestSuite) TestGetRolloutNamespace() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- NamespaceKey: s.namespace,
- Name: "foo",
- Description: "bar",
- Enabled: true,
- Type: flipt.FlagType_BOOLEAN_FLAG_TYPE,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- rollout, err := s.store.CreateRollout(context.TODO(), &flipt.CreateRolloutRequest{
- FlagKey: flag.Key,
- NamespaceKey: s.namespace,
- Rank: 1,
- Rule: &flipt.CreateRolloutRequest_Threshold{
- Threshold: &flipt.RolloutThreshold{
- Value: true,
- Percentage: 40,
- },
- },
- })
-
- require.NoError(t, err)
- assert.NotNil(t, rollout)
-
- got, err := s.store.GetRollout(context.TODO(), storage.NewNamespace(s.namespace), rollout.Id)
-
- require.NoError(t, err)
- assert.NotNil(t, got)
-
- assert.Equal(t, rollout.Id, got.Id)
- assert.Equal(t, s.namespace, got.NamespaceKey)
- assert.Equal(t, rollout.FlagKey, got.FlagKey)
- assert.Equal(t, rollout.Rank, got.Rank)
- assert.Equal(t, rollout.Type, got.Type)
- assert.Equal(t, rollout.Rule, got.Rule)
- assert.NotZero(t, got.CreatedAt)
- assert.NotZero(t, got.UpdatedAt)
-}
-
-func (s *DBTestSuite) TestGetRollout_NotFound() {
- t := s.T()
-
- _, err := s.store.GetRollout(context.TODO(), storage.NewNamespace(storage.DefaultNamespace), "0")
- assert.EqualError(t, err, "rollout \"default/0\" not found")
-}
-
-func (s *DBTestSuite) TestGetRolloutNamespace_NotFound() {
- t := s.T()
-
- _, err := s.store.GetRollout(context.TODO(), storage.NewNamespace(s.namespace), "0")
- assert.EqualError(t, err, fmt.Sprintf("rollout \"%s/0\" not found", s.namespace))
-}
-
-func (s *DBTestSuite) TestListRollouts() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- Type: flipt.FlagType_BOOLEAN_FLAG_TYPE,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- variant, err := s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- FlagKey: flag.Key,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, variant)
-
- reqs := []*flipt.CreateRolloutRequest{
- {
- FlagKey: flag.Key,
- Rank: 1,
- Rule: &flipt.CreateRolloutRequest_Threshold{
- Threshold: &flipt.RolloutThreshold{
- Value: true,
- Percentage: 40,
- },
- },
- },
- {
- FlagKey: flag.Key,
- Rank: 2,
- Rule: &flipt.CreateRolloutRequest_Threshold{
- Threshold: &flipt.RolloutThreshold{
- Value: true,
- Percentage: 80.2,
- },
- },
- },
- }
-
- for _, req := range reqs {
- _, err := s.store.CreateRollout(context.TODO(), req)
- require.NoError(t, err)
- }
-
- res, err := s.store.ListRollouts(context.TODO(), storage.ListWithOptions(storage.NewResource(storage.DefaultNamespace, flag.Key)))
- require.NoError(t, err)
-
- got := res.Results
- assert.NotEmpty(t, got)
-
- for _, rollout := range got {
- assert.Equal(t, storage.DefaultNamespace, rollout.NamespaceKey)
- assert.NotZero(t, rollout.CreatedAt)
- assert.NotZero(t, rollout.UpdatedAt)
- }
-}
-
-func (s *DBTestSuite) TestListRollouts_MultipleSegments() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- Type: flipt.FlagType_BOOLEAN_FLAG_TYPE,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- firstSegment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, firstSegment)
-
- secondSegment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- Key: "another_segment_3",
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, secondSegment)
-
- reqs := []*flipt.CreateRolloutRequest{
- {
- FlagKey: flag.Key,
- Rank: 1,
- Rule: &flipt.CreateRolloutRequest_Segment{
- Segment: &flipt.RolloutSegment{
- Value: true,
- SegmentKeys: []string{firstSegment.Key, secondSegment.Key},
- },
- },
- },
- {
- FlagKey: flag.Key,
- Rank: 2,
- Rule: &flipt.CreateRolloutRequest_Segment{
- Segment: &flipt.RolloutSegment{
- Value: true,
- SegmentKeys: []string{firstSegment.Key, secondSegment.Key},
- },
- },
- },
- }
-
- for _, req := range reqs {
- _, err := s.store.CreateRollout(context.TODO(), req)
- require.NoError(t, err)
- }
-
- res, err := s.store.ListRollouts(context.TODO(), storage.ListWithOptions(storage.NewResource(storage.DefaultNamespace, flag.Key)))
- require.NoError(t, err)
-
- got := res.Results
- assert.NotEmpty(t, got)
-
- for _, rollout := range got {
- assert.Equal(t, storage.DefaultNamespace, rollout.NamespaceKey)
-
- rs, ok := rollout.Rule.(*flipt.Rollout_Segment)
- assert.True(t, ok, "rule should successfully assert to a rollout segment")
- assert.Len(t, rs.Segment.SegmentKeys, 2)
- assert.Contains(t, rs.Segment.SegmentKeys, firstSegment.Key)
- assert.Contains(t, rs.Segment.SegmentKeys, secondSegment.Key)
- assert.NotZero(t, rollout.CreatedAt)
- assert.NotZero(t, rollout.UpdatedAt)
- }
-}
-
-func (s *DBTestSuite) TestListRolloutsNamespace() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- Type: flipt.FlagType_BOOLEAN_FLAG_TYPE,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- reqs := []*flipt.CreateRolloutRequest{
- {
- NamespaceKey: s.namespace,
- FlagKey: flag.Key,
- Rank: 1,
- Rule: &flipt.CreateRolloutRequest_Threshold{
- Threshold: &flipt.RolloutThreshold{
- Value: true,
- Percentage: 40,
- },
- },
- },
- {
- NamespaceKey: s.namespace,
- FlagKey: flag.Key,
- Rank: 2,
- Rule: &flipt.CreateRolloutRequest_Threshold{
- Threshold: &flipt.RolloutThreshold{
- Value: true,
- Percentage: 80.2,
- },
- },
- },
- }
-
- for _, req := range reqs {
- _, err := s.store.CreateRollout(context.TODO(), req)
- require.NoError(t, err)
- }
-
- res, err := s.store.ListRollouts(context.TODO(), storage.ListWithOptions(storage.NewResource(s.namespace, flag.Key)))
- require.NoError(t, err)
-
- got := res.Results
- assert.NotEmpty(t, got)
-
- for _, rollout := range got {
- assert.Equal(t, s.namespace, rollout.NamespaceKey)
- assert.NotZero(t, rollout.CreatedAt)
- assert.NotZero(t, rollout.UpdatedAt)
- }
-}
-
-func (s *DBTestSuite) TestListRolloutsPagination_LimitOffset() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- reqs := []*flipt.CreateRolloutRequest{
- {
- FlagKey: flag.Key,
- Rank: 1,
- Rule: &flipt.CreateRolloutRequest_Threshold{
- Threshold: &flipt.RolloutThreshold{
- Value: true,
- Percentage: 40,
- },
- },
- },
- {
- FlagKey: flag.Key,
- Rank: 2,
- Rule: &flipt.CreateRolloutRequest_Threshold{
- Threshold: &flipt.RolloutThreshold{
- Value: true,
- Percentage: 80.2,
- },
- },
- },
- }
-
- for _, req := range reqs {
- _, err := s.store.CreateRollout(context.TODO(), req)
- require.NoError(t, err)
- }
-
- res, err := s.store.ListRollouts(context.TODO(), storage.ListWithOptions(
- storage.NewResource(storage.DefaultNamespace, flag.Key),
- storage.ListWithQueryParamOptions[storage.ResourceRequest](storage.WithLimit(1), storage.WithOffset(1)),
- ))
- require.NoError(t, err)
-
- got := res.Results
- assert.Len(t, got, 1)
-}
-
-func (s *DBTestSuite) TestListRolloutsPagination_LimitWithNextPage() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- Type: flipt.FlagType_BOOLEAN_FLAG_TYPE,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- reqs := []*flipt.CreateRolloutRequest{
- {
- FlagKey: flag.Key,
- Rank: 1,
- Rule: &flipt.CreateRolloutRequest_Threshold{
- Threshold: &flipt.RolloutThreshold{
- Value: true,
- Percentage: 40,
- },
- },
- },
- {
- FlagKey: flag.Key,
- Rank: 2,
- Rule: &flipt.CreateRolloutRequest_Threshold{
- Threshold: &flipt.RolloutThreshold{
- Value: true,
- Percentage: 80.2,
- },
- },
- },
- }
-
- for _, req := range reqs {
- _, err := s.store.CreateRollout(context.TODO(), req)
- require.NoError(t, err)
- }
-
- // TODO: the ordering (DESC) is required because the default ordering is ASC and we are not clearing the DB between tests
- opts := []storage.QueryOption{storage.WithOrder(storage.OrderDesc), storage.WithLimit(1)}
-
- res, err := s.store.ListRollouts(context.TODO(),
- storage.ListWithOptions(
- storage.NewResource(storage.DefaultNamespace, flag.Key),
- storage.ListWithQueryParamOptions[storage.ResourceRequest](opts...),
- ),
- )
- require.NoError(t, err)
-
- got := res.Results
- assert.Len(t, got, 1)
- assert.Equal(t, reqs[1].Rank, got[0].Rank)
- assert.NotEmpty(t, res.NextPageToken)
-
- pageToken := &common.PageToken{}
- pTokenB, err := base64.StdEncoding.DecodeString(res.NextPageToken)
- require.NoError(t, err)
-
- err = json.Unmarshal(pTokenB, pageToken)
- require.NoError(t, err)
-
- assert.NotEmpty(t, pageToken.Key)
- assert.Equal(t, uint64(1), pageToken.Offset)
-
- opts = append(opts, storage.WithPageToken(res.NextPageToken))
-
- res, err = s.store.ListRollouts(context.TODO(),
- storage.ListWithOptions(
- storage.NewResource(storage.DefaultNamespace, flag.Key),
- storage.ListWithQueryParamOptions[storage.ResourceRequest](opts...),
- ),
- )
- require.NoError(t, err)
-
- got = res.Results
- assert.Len(t, got, 1)
- assert.Equal(t, reqs[0].Rank, got[0].Rank)
-}
-
-func (s *DBTestSuite) TestCreateRollout_InvalidRolloutType() {
- t := s.T()
-
- _, err := s.store.CreateRollout(context.TODO(), &flipt.CreateRolloutRequest{
- FlagKey: "foo",
- Rank: 1,
- })
-
- assert.Equal(t, err, errs.ErrInvalid("rollout rule is missing"))
-}
-
-func (s *DBTestSuite) TestCreateRollout_FlagNotFound() {
- t := s.T()
-
- _, err := s.store.CreateRollout(context.TODO(), &flipt.CreateRolloutRequest{
- FlagKey: "foo",
- Rank: 1,
- Rule: &flipt.CreateRolloutRequest_Threshold{
- Threshold: &flipt.RolloutThreshold{
- Percentage: 50.0,
- Value: true,
- },
- },
- })
-
- assert.EqualError(t, err, "flag \"default/foo\" not found")
-}
-
-func (s *DBTestSuite) TestCreateRolloutNamespace_InvalidRolloutType() {
- t := s.T()
-
- _, err := s.store.CreateRollout(context.TODO(), &flipt.CreateRolloutRequest{
- NamespaceKey: s.namespace,
- FlagKey: "foo",
- Rank: 1,
- })
-
- assert.Equal(t, err, errs.ErrInvalid("rollout rule is missing"))
-}
-
-func (s *DBTestSuite) TestCreateRolloutNamespace_FlagNotFound() {
- t := s.T()
-
- _, err := s.store.CreateRollout(context.TODO(), &flipt.CreateRolloutRequest{
- NamespaceKey: s.namespace,
- FlagKey: "foo",
- Rank: 1,
- Rule: &flipt.CreateRolloutRequest_Threshold{
- Threshold: &flipt.RolloutThreshold{
- Percentage: 50.0,
- Value: true,
- },
- },
- })
-
- assert.EqualError(t, err, fmt.Sprintf("flag \"%s/foo\" not found", s.namespace))
-}
-
-func (s *DBTestSuite) TestUpdateRollout() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- segmentOne, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- Key: "segment_one",
- Name: "Segment One",
- MatchType: flipt.MatchType_ANY_MATCH_TYPE,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, segmentOne)
-
- segmentTwo, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- Key: "segment_two",
- Name: "Segment Two",
- MatchType: flipt.MatchType_ANY_MATCH_TYPE,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, segmentTwo)
-
- rollout, err := s.store.CreateRollout(context.TODO(), &flipt.CreateRolloutRequest{
- FlagKey: flag.Key,
- Rank: 1,
- Rule: &flipt.CreateRolloutRequest_Segment{
- Segment: &flipt.RolloutSegment{
- Value: true,
- SegmentKey: "segment_one",
- SegmentOperator: flipt.SegmentOperator_AND_SEGMENT_OPERATOR,
- },
- },
- })
-
- require.NoError(t, err)
- assert.NotNil(t, rollout)
-
- assert.NotZero(t, rollout.Id)
- assert.Equal(t, storage.DefaultNamespace, rollout.NamespaceKey)
- assert.Equal(t, flag.Key, rollout.FlagKey)
- assert.Equal(t, int32(1), rollout.Rank)
- assert.Equal(t, flipt.RolloutType_SEGMENT_ROLLOUT_TYPE, rollout.Type)
- assert.Equal(t, segmentOne.Key, rollout.GetSegment().SegmentKey)
- assert.True(t, rollout.GetSegment().Value)
- assert.NotZero(t, rollout.CreatedAt)
- assert.Equal(t, rollout.CreatedAt.Seconds, rollout.UpdatedAt.Seconds)
- assert.Equal(t, flipt.SegmentOperator_OR_SEGMENT_OPERATOR, rollout.GetSegment().SegmentOperator)
-
- updated, err := s.store.UpdateRollout(context.TODO(), &flipt.UpdateRolloutRequest{
- Id: rollout.Id,
- FlagKey: rollout.FlagKey,
- Description: "foobar",
- Rule: &flipt.UpdateRolloutRequest_Segment{
- Segment: &flipt.RolloutSegment{
- Value: false,
- SegmentKeys: []string{segmentOne.Key, segmentTwo.Key},
- SegmentOperator: flipt.SegmentOperator_AND_SEGMENT_OPERATOR,
- },
- },
- })
-
- require.NoError(t, err)
-
- assert.Equal(t, rollout.Id, updated.Id)
- assert.Equal(t, storage.DefaultNamespace, updated.NamespaceKey)
- assert.Equal(t, rollout.FlagKey, updated.FlagKey)
- assert.Equal(t, "foobar", updated.Description)
- assert.Equal(t, int32(1), updated.Rank)
- assert.Equal(t, flipt.RolloutType_SEGMENT_ROLLOUT_TYPE, updated.Type)
- assert.Contains(t, updated.GetSegment().SegmentKeys, segmentOne.Key)
- assert.Contains(t, updated.GetSegment().SegmentKeys, segmentTwo.Key)
- assert.False(t, updated.GetSegment().Value)
- assert.Equal(t, flipt.SegmentOperator_AND_SEGMENT_OPERATOR, updated.GetSegment().SegmentOperator)
- assert.NotZero(t, updated.CreatedAt)
- assert.NotZero(t, updated.UpdatedAt)
-}
-
-func (s *DBTestSuite) TestUpdateRollout_OneSegment() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- segmentOne, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- Key: fmt.Sprintf("one_%s", t.Name()),
- Name: "Segment One",
- MatchType: flipt.MatchType_ANY_MATCH_TYPE,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, segmentOne)
-
- segmentTwo, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- Key: fmt.Sprintf("two_%s", t.Name()),
- Name: "Segment Two",
- MatchType: flipt.MatchType_ANY_MATCH_TYPE,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, segmentTwo)
-
- rollout, err := s.store.CreateRollout(context.TODO(), &flipt.CreateRolloutRequest{
- FlagKey: flag.Key,
- Rank: 1,
- Rule: &flipt.CreateRolloutRequest_Segment{
- Segment: &flipt.RolloutSegment{
- Value: true,
- SegmentKeys: []string{segmentOne.Key, segmentTwo.Key},
- SegmentOperator: flipt.SegmentOperator_AND_SEGMENT_OPERATOR,
- },
- },
- })
-
- require.NoError(t, err)
- assert.NotNil(t, rollout)
-
- assert.NotZero(t, rollout.Id)
- assert.Equal(t, storage.DefaultNamespace, rollout.NamespaceKey)
- assert.Equal(t, flag.Key, rollout.FlagKey)
- assert.Equal(t, int32(1), rollout.Rank)
- assert.Equal(t, flipt.RolloutType_SEGMENT_ROLLOUT_TYPE, rollout.Type)
- assert.Contains(t, rollout.GetSegment().SegmentKeys, segmentOne.Key)
- assert.Contains(t, rollout.GetSegment().SegmentKeys, segmentTwo.Key)
- assert.True(t, rollout.GetSegment().Value)
- assert.NotZero(t, rollout.CreatedAt)
- assert.Equal(t, rollout.CreatedAt.Seconds, rollout.UpdatedAt.Seconds)
- assert.Equal(t, flipt.SegmentOperator_AND_SEGMENT_OPERATOR, rollout.GetSegment().SegmentOperator)
-
- updated, err := s.store.UpdateRollout(context.TODO(), &flipt.UpdateRolloutRequest{
- Id: rollout.Id,
- FlagKey: rollout.FlagKey,
- Description: "foobar",
- Rule: &flipt.UpdateRolloutRequest_Segment{
- Segment: &flipt.RolloutSegment{
- Value: false,
- SegmentKey: segmentOne.Key,
- SegmentOperator: flipt.SegmentOperator_AND_SEGMENT_OPERATOR,
- },
- },
- })
-
- require.NoError(t, err)
-
- assert.Equal(t, rollout.Id, updated.Id)
- assert.Equal(t, storage.DefaultNamespace, updated.NamespaceKey)
- assert.Equal(t, rollout.FlagKey, updated.FlagKey)
- assert.Equal(t, "foobar", updated.Description)
- assert.Equal(t, int32(1), updated.Rank)
- assert.Equal(t, flipt.RolloutType_SEGMENT_ROLLOUT_TYPE, updated.Type)
- assert.Equal(t, segmentOne.Key, updated.GetSegment().SegmentKey)
- assert.False(t, updated.GetSegment().Value)
- assert.Equal(t, flipt.SegmentOperator_OR_SEGMENT_OPERATOR, updated.GetSegment().SegmentOperator)
- assert.NotZero(t, updated.CreatedAt)
- assert.NotZero(t, updated.UpdatedAt)
-}
-
-func (s *DBTestSuite) TestUpdateRolloutNamespace() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- NamespaceKey: s.namespace,
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- rollout, err := s.store.CreateRollout(context.TODO(), &flipt.CreateRolloutRequest{
- FlagKey: flag.Key,
- NamespaceKey: s.namespace,
- Rank: 1,
- Rule: &flipt.CreateRolloutRequest_Threshold{
- Threshold: &flipt.RolloutThreshold{
- Value: true,
- Percentage: 40,
- },
- },
- })
-
- require.NoError(t, err)
- assert.NotNil(t, rollout)
-
- assert.NotZero(t, rollout.Id)
- assert.Equal(t, s.namespace, rollout.NamespaceKey)
- assert.Equal(t, flag.Key, rollout.FlagKey)
- assert.Equal(t, int32(1), rollout.Rank)
- assert.Equal(t, flipt.RolloutType_THRESHOLD_ROLLOUT_TYPE, rollout.Type)
- assert.InDelta(t, 40, rollout.GetThreshold().Percentage, 0)
- assert.True(t, rollout.GetThreshold().Value)
- assert.NotZero(t, rollout.CreatedAt)
- assert.Equal(t, rollout.CreatedAt.Seconds, rollout.UpdatedAt.Seconds)
-
- updated, err := s.store.UpdateRollout(context.TODO(), &flipt.UpdateRolloutRequest{
- Id: rollout.Id,
- FlagKey: rollout.FlagKey,
- NamespaceKey: s.namespace,
- Description: "foobar",
- Rule: &flipt.UpdateRolloutRequest_Threshold{
- Threshold: &flipt.RolloutThreshold{
- Value: false,
- Percentage: 80,
- },
- },
- })
-
- require.NoError(t, err)
-
- assert.Equal(t, rollout.Id, updated.Id)
- assert.Equal(t, s.namespace, updated.NamespaceKey)
- assert.Equal(t, rollout.FlagKey, updated.FlagKey)
- assert.Equal(t, "foobar", updated.Description)
- assert.Equal(t, int32(1), updated.Rank)
- assert.Equal(t, flipt.RolloutType_THRESHOLD_ROLLOUT_TYPE, updated.Type)
- assert.InDelta(t, 80, updated.GetThreshold().Percentage, 0)
- assert.False(t, updated.GetThreshold().Value)
- assert.NotZero(t, updated.CreatedAt)
- assert.NotZero(t, updated.UpdatedAt)
-}
-
-func (s *DBTestSuite) TestUpdateRollout_InvalidType() {
- t := s.T()
-
- ctx := context.TODO()
- flag, err := s.store.CreateFlag(ctx, &flipt.CreateFlagRequest{
- Key: t.Name(),
- NamespaceKey: s.namespace,
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- _, err = s.store.CreateSegment(ctx, &flipt.CreateSegmentRequest{
- NamespaceKey: s.namespace,
- Key: "segment_one",
- Name: "Segment One",
- MatchType: flipt.MatchType_ANY_MATCH_TYPE,
- })
-
- require.NoError(t, err)
-
- rollout, err := s.store.CreateRollout(context.TODO(), &flipt.CreateRolloutRequest{
- FlagKey: flag.Key,
- NamespaceKey: s.namespace,
- Rank: 1,
- Rule: &flipt.CreateRolloutRequest_Segment{
- Segment: &flipt.RolloutSegment{
- SegmentKeys: []string{"segment_one"},
- Value: true,
- },
- },
- })
-
- require.NoError(t, err)
- assert.NotNil(t, rollout)
-
- assert.NotZero(t, rollout.Id)
- assert.Equal(t, s.namespace, rollout.NamespaceKey)
- assert.Equal(t, flag.Key, rollout.FlagKey)
- assert.Equal(t, int32(1), rollout.Rank)
- assert.Equal(t, flipt.RolloutType_SEGMENT_ROLLOUT_TYPE, rollout.Type)
- assert.Equal(t, "segment_one", rollout.GetSegment().SegmentKey)
- assert.True(t, rollout.GetSegment().Value)
- assert.NotZero(t, rollout.CreatedAt)
- assert.Equal(t, rollout.CreatedAt.Seconds, rollout.UpdatedAt.Seconds)
-
- _, err = s.store.UpdateRollout(context.TODO(), &flipt.UpdateRolloutRequest{
- Id: rollout.Id,
- FlagKey: rollout.FlagKey,
- NamespaceKey: s.namespace,
- Description: "foobar",
- Rule: &flipt.UpdateRolloutRequest_Threshold{
- Threshold: &flipt.RolloutThreshold{
- Value: false,
- Percentage: 80,
- },
- },
- })
-
- require.EqualError(t, err, "cannot change type of rollout: have \"SEGMENT_ROLLOUT_TYPE\" attempted \"THRESHOLD_ROLLOUT_TYPE\"")
-}
-
-func (s *DBTestSuite) TestUpdateRollout_NotFound() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- Type: flipt.FlagType_BOOLEAN_FLAG_TYPE,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- _, err = s.store.UpdateRollout(context.TODO(), &flipt.UpdateRolloutRequest{
- Id: "foo",
- FlagKey: flag.Key,
- })
-
- assert.EqualError(t, err, "rollout \"default/foo\" not found")
-}
-
-func (s *DBTestSuite) TestUpdateRolloutNamespace_NotFound() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- Type: flipt.FlagType_BOOLEAN_FLAG_TYPE,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- _, err = s.store.UpdateRollout(context.TODO(), &flipt.UpdateRolloutRequest{
- NamespaceKey: s.namespace,
- Id: "foo",
- FlagKey: flag.Key,
- })
-
- assert.EqualError(t, err, fmt.Sprintf("rollout \"%s/foo\" not found", s.namespace))
-}
-
-func (s *DBTestSuite) TestDeleteRollout() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- Type: flipt.FlagType_BOOLEAN_FLAG_TYPE,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- var rollouts []*flipt.Rollout
-
- // create 3 rollouts
- for i := 0; i < 3; i++ {
- rollout, err := s.store.CreateRollout(context.TODO(), &flipt.CreateRolloutRequest{
- FlagKey: flag.Key,
- Rank: int32(i + 1),
- Rule: &flipt.CreateRolloutRequest_Threshold{
- Threshold: &flipt.RolloutThreshold{
- Value: true,
- Percentage: 40 + float32(i),
- },
- },
- })
-
- require.NoError(t, err)
- assert.NotNil(t, rollout)
- rollouts = append(rollouts, rollout)
- }
-
- // delete second rollout
- err = s.store.DeleteRollout(context.TODO(), &flipt.DeleteRolloutRequest{
- FlagKey: flag.Key,
- Id: rollouts[1].Id,
- })
-
- require.NoError(t, err)
-
- res, err := s.store.ListRollouts(context.TODO(), storage.ListWithOptions(storage.NewResource(storage.DefaultNamespace, flag.Key)))
- // ensure rollouts are in correct order
- require.NoError(t, err)
-
- got := res.Results
- assert.NotNil(t, got)
- assert.Len(t, got, 2)
- assert.Equal(t, rollouts[0].Id, got[0].Id)
- assert.Equal(t, int32(1), got[0].Rank)
- assert.Equal(t, storage.DefaultNamespace, got[0].NamespaceKey)
- assert.Equal(t, rollouts[2].Id, got[1].Id)
- assert.Equal(t, int32(2), got[1].Rank)
- assert.Equal(t, storage.DefaultNamespace, got[1].NamespaceKey)
-}
-
-func (s *DBTestSuite) TestDeleteRolloutNamespace() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- Type: flipt.FlagType_BOOLEAN_FLAG_TYPE,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- var rollouts []*flipt.Rollout
-
- // create 3 rollouts
- for i := 0; i < 3; i++ {
- rollout, err := s.store.CreateRollout(context.TODO(), &flipt.CreateRolloutRequest{
- NamespaceKey: s.namespace,
- FlagKey: flag.Key,
- Rank: int32(i + 1),
- Rule: &flipt.CreateRolloutRequest_Threshold{
- Threshold: &flipt.RolloutThreshold{
- Value: true,
- Percentage: 40 + float32(i),
- },
- },
- })
-
- require.NoError(t, err)
- assert.NotNil(t, rollout)
- rollouts = append(rollouts, rollout)
- }
-
- // delete second rollout
- err = s.store.DeleteRollout(context.TODO(), &flipt.DeleteRolloutRequest{
- NamespaceKey: s.namespace,
- FlagKey: flag.Key,
- Id: rollouts[1].Id,
- })
-
- require.NoError(t, err)
-
- res, err := s.store.ListRollouts(context.TODO(), storage.ListWithOptions(storage.NewResource(s.namespace, flag.Key)))
- // ensure rollouts are in correct order
- require.NoError(t, err)
-
- got := res.Results
- assert.NotNil(t, got)
- assert.Len(t, got, 2)
- assert.Equal(t, rollouts[0].Id, got[0].Id)
- assert.Equal(t, int32(1), got[0].Rank)
- assert.Equal(t, s.namespace, got[0].NamespaceKey)
- assert.Equal(t, rollouts[2].Id, got[1].Id)
- assert.Equal(t, int32(2), got[1].Rank)
- assert.Equal(t, s.namespace, got[1].NamespaceKey)
-}
-
-func (s *DBTestSuite) TestDeleteRollout_NotFound() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- Type: flipt.FlagType_BOOLEAN_FLAG_TYPE,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- err = s.store.DeleteRollout(context.TODO(), &flipt.DeleteRolloutRequest{
- Id: "foo",
- FlagKey: flag.Key,
- })
-
- require.NoError(t, err)
-}
-
-func (s *DBTestSuite) TestDeleteRolloutNamespace_NotFound() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- err = s.store.DeleteRollout(context.TODO(), &flipt.DeleteRolloutRequest{
- NamespaceKey: s.namespace,
- Id: "foo",
- FlagKey: flag.Key,
- })
-
- require.NoError(t, err)
-}
-
-func (s *DBTestSuite) TestOrderRollouts() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- Type: flipt.FlagType_BOOLEAN_FLAG_TYPE,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- var rollouts []*flipt.Rollout
-
- // create 3 rollouts
- for i := 0; i < 3; i++ {
- rollout, err := s.store.CreateRollout(context.TODO(), &flipt.CreateRolloutRequest{
- FlagKey: flag.Key,
- Rank: int32(i + 1),
- Rule: &flipt.CreateRolloutRequest_Threshold{
- Threshold: &flipt.RolloutThreshold{
- Value: true,
- Percentage: 40 + float32(i),
- },
- },
- })
-
- require.NoError(t, err)
- assert.NotNil(t, rollout)
- rollouts = append(rollouts, rollout)
- }
-
- // order rollouts in reverse order
- sort.Slice(rollouts, func(i, j int) bool { return rollouts[i].Rank > rollouts[j].Rank })
-
- var rolloutIds []string
- for _, rollout := range rollouts {
- rolloutIds = append(rolloutIds, rollout.Id)
- }
-
- // re-order rollouts
- err = s.store.OrderRollouts(context.TODO(), &flipt.OrderRolloutsRequest{
- FlagKey: flag.Key,
- RolloutIds: rolloutIds,
- })
-
- require.NoError(t, err)
-
- res, err := s.store.ListRollouts(context.TODO(), storage.ListWithOptions(storage.NewResource(storage.DefaultNamespace, flag.Key)))
-
- // ensure rollouts are in correct order
- require.NoError(t, err)
- got := res.Results
- assert.NotNil(t, got)
- assert.Len(t, got, 3)
-
- assert.Equal(t, rollouts[0].Id, got[0].Id)
- assert.Equal(t, int32(1), got[0].Rank)
- assert.Equal(t, storage.DefaultNamespace, got[0].NamespaceKey)
-
- assert.Equal(t, rollouts[1].Id, got[1].Id)
- assert.Equal(t, int32(2), got[1].Rank)
- assert.Equal(t, storage.DefaultNamespace, got[1].NamespaceKey)
-
- assert.Equal(t, rollouts[2].Id, got[2].Id)
- assert.Equal(t, int32(3), got[2].Rank)
- assert.Equal(t, storage.DefaultNamespace, got[2].NamespaceKey)
-}
-
-func (s *DBTestSuite) TestOrderRolloutsNamespace() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- Type: flipt.FlagType_BOOLEAN_FLAG_TYPE,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- var rollouts []*flipt.Rollout
-
- // create 3 rollouts
- for i := 0; i < 3; i++ {
- rollout, err := s.store.CreateRollout(context.TODO(), &flipt.CreateRolloutRequest{
- NamespaceKey: s.namespace,
- FlagKey: flag.Key,
- Rank: int32(i + 1),
- Rule: &flipt.CreateRolloutRequest_Threshold{
- Threshold: &flipt.RolloutThreshold{
- Value: true,
- Percentage: 40 + float32(i),
- },
- },
- })
-
- require.NoError(t, err)
- assert.NotNil(t, rollout)
- rollouts = append(rollouts, rollout)
- }
-
- // order rollouts in reverse order
- sort.Slice(rollouts, func(i, j int) bool { return rollouts[i].Rank > rollouts[j].Rank })
-
- var rolloutIds []string
- for _, rollout := range rollouts {
- rolloutIds = append(rolloutIds, rollout.Id)
- }
-
- // re-order rollouts
- err = s.store.OrderRollouts(context.TODO(), &flipt.OrderRolloutsRequest{
- NamespaceKey: s.namespace,
- FlagKey: flag.Key,
- RolloutIds: rolloutIds,
- })
-
- require.NoError(t, err)
-
- res, err := s.store.ListRollouts(context.TODO(), storage.ListWithOptions(storage.NewResource(s.namespace, flag.Key)))
-
- // ensure rollouts are in correct order
- require.NoError(t, err)
- got := res.Results
- assert.NotNil(t, got)
- assert.Len(t, got, 3)
-
- assert.Equal(t, rollouts[0].Id, got[0].Id)
- assert.Equal(t, int32(1), got[0].Rank)
- assert.Equal(t, s.namespace, got[0].NamespaceKey)
-
- assert.Equal(t, rollouts[1].Id, got[1].Id)
- assert.Equal(t, int32(2), got[1].Rank)
- assert.Equal(t, s.namespace, got[1].NamespaceKey)
-
- assert.Equal(t, rollouts[2].Id, got[2].Id)
- assert.Equal(t, int32(3), got[2].Rank)
- assert.Equal(t, s.namespace, got[2].NamespaceKey)
-}
diff --git a/internal/storage/sql/rule_test.go b/internal/storage/sql/rule_test.go
deleted file mode 100644
index 261df2f86f..0000000000
--- a/internal/storage/sql/rule_test.go
+++ /dev/null
@@ -1,1670 +0,0 @@
-package sql_test
-
-import (
- "context"
- "encoding/base64"
- "encoding/json"
- "fmt"
- "sort"
- "time"
-
- "github.com/google/uuid"
- "github.com/stretchr/testify/assert"
- "github.com/stretchr/testify/require"
- "go.flipt.io/flipt/internal/storage"
- fliptsql "go.flipt.io/flipt/internal/storage/sql"
- "go.flipt.io/flipt/internal/storage/sql/common"
- flipt "go.flipt.io/flipt/rpc/flipt"
-)
-
-func (s *DBTestSuite) TestGetRule() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- variant, err := s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- FlagKey: flag.Key,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, variant)
-
- segment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, segment)
-
- rule, err := s.store.CreateRule(context.TODO(), &flipt.CreateRuleRequest{
- FlagKey: flag.Key,
- SegmentKey: segment.Key,
- Rank: 1,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, rule)
-
- got, err := s.store.GetRule(context.TODO(), storage.NewNamespace(storage.DefaultNamespace), rule.Id)
-
- require.NoError(t, err)
- assert.NotNil(t, got)
-
- assert.Equal(t, rule.Id, got.Id)
- assert.Equal(t, storage.DefaultNamespace, got.NamespaceKey)
- assert.Equal(t, rule.FlagKey, got.FlagKey)
- assert.Equal(t, rule.SegmentKey, got.SegmentKey)
- assert.Equal(t, rule.Rank, got.Rank)
- assert.NotZero(t, got.CreatedAt)
- assert.NotZero(t, got.UpdatedAt)
-}
-
-func (s *DBTestSuite) TestGetRule_MultipleSegments() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- variant, err := s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- FlagKey: flag.Key,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, variant)
-
- firstSegment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, firstSegment)
-
- secondSegment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- Key: "another_segment_1",
- Name: "bar",
- Description: "foo",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, secondSegment)
-
- rule, err := s.store.CreateRule(context.TODO(), &flipt.CreateRuleRequest{
- FlagKey: flag.Key,
- SegmentKeys: []string{firstSegment.Key, secondSegment.Key},
- Rank: 1,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, rule)
-
- got, err := s.store.GetRule(context.TODO(), storage.NewNamespace(storage.DefaultNamespace), rule.Id)
-
- require.NoError(t, err)
- assert.NotNil(t, got)
-
- assert.Equal(t, rule.Id, got.Id)
- assert.Equal(t, storage.DefaultNamespace, got.NamespaceKey)
- assert.Equal(t, rule.FlagKey, got.FlagKey)
-
- assert.Len(t, rule.SegmentKeys, 2)
- assert.Equal(t, firstSegment.Key, rule.SegmentKeys[0])
- assert.Equal(t, secondSegment.Key, rule.SegmentKeys[1])
- assert.Equal(t, rule.Rank, got.Rank)
- assert.NotZero(t, got.CreatedAt)
- assert.NotZero(t, got.UpdatedAt)
-}
-
-func (s *DBTestSuite) TestGetRuleNamespace() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- variant, err := s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- NamespaceKey: s.namespace,
- FlagKey: flag.Key,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, variant)
-
- segment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, segment)
-
- rule, err := s.store.CreateRule(context.TODO(), &flipt.CreateRuleRequest{
- NamespaceKey: s.namespace,
- FlagKey: flag.Key,
- SegmentKey: segment.Key,
- Rank: 1,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, rule)
-
- got, err := s.store.GetRule(context.TODO(), storage.NewNamespace(s.namespace), rule.Id)
-
- require.NoError(t, err)
- assert.NotNil(t, got)
-
- assert.Equal(t, rule.Id, got.Id)
- assert.Equal(t, s.namespace, got.NamespaceKey)
- assert.Equal(t, rule.FlagKey, got.FlagKey)
- assert.Equal(t, rule.SegmentKey, got.SegmentKey)
- assert.Equal(t, rule.Rank, got.Rank)
- assert.NotZero(t, got.CreatedAt)
- assert.NotZero(t, got.UpdatedAt)
-}
-
-func (s *DBTestSuite) TestGetRule_NotFound() {
- t := s.T()
-
- _, err := s.store.GetRule(context.TODO(), storage.NewNamespace(storage.DefaultNamespace), "0")
- assert.EqualError(t, err, "rule \"default/0\" not found")
-}
-
-func (s *DBTestSuite) TestGetRuleNamespace_NotFound() {
- t := s.T()
-
- _, err := s.store.GetRule(context.TODO(), storage.NewNamespace(s.namespace), "0")
- assert.EqualError(t, err, fmt.Sprintf("rule \"%s/0\" not found", s.namespace))
-}
-
-func (s *DBTestSuite) TestListRules() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- variant, err := s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- FlagKey: flag.Key,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, variant)
-
- segment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, segment)
-
- reqs := []*flipt.CreateRuleRequest{
- {
- FlagKey: flag.Key,
- SegmentKey: segment.Key,
- Rank: 1,
- },
- {
- FlagKey: flag.Key,
- SegmentKey: segment.Key,
- Rank: 2,
- },
- }
-
- for _, req := range reqs {
- _, err := s.store.CreateRule(context.TODO(), req)
- require.NoError(t, err)
- }
-
- _, err = s.store.ListRules(context.TODO(),
- storage.ListWithOptions(
- storage.NewResource(storage.DefaultNamespace, flag.Key),
- storage.ListWithQueryParamOptions[storage.ResourceRequest](storage.WithPageToken("Hello World")),
- ),
- )
- require.EqualError(t, err, "pageToken is not valid: \"Hello World\"")
-
- res, err := s.store.ListRules(context.TODO(), storage.ListWithOptions(storage.NewResource(storage.DefaultNamespace, flag.Key)))
- require.NoError(t, err)
-
- got := res.Results
- assert.NotEmpty(t, got)
-
- for _, rule := range got {
- assert.Equal(t, storage.DefaultNamespace, rule.NamespaceKey)
- assert.NotZero(t, rule.CreatedAt)
- assert.NotZero(t, rule.UpdatedAt)
- }
-}
-
-func (s *DBTestSuite) TestListRules_MultipleSegments() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- variant, err := s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- FlagKey: flag.Key,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, variant)
-
- firstSegment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, firstSegment)
-
- secondSegment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- Key: "another_segment_2",
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, secondSegment)
-
- reqs := []*flipt.CreateRuleRequest{
- {
- FlagKey: flag.Key,
- SegmentKeys: []string{firstSegment.Key, secondSegment.Key},
- Rank: 1,
- },
- {
- FlagKey: flag.Key,
- SegmentKeys: []string{firstSegment.Key, secondSegment.Key},
- Rank: 2,
- },
- }
-
- for _, req := range reqs {
- _, err := s.store.CreateRule(context.TODO(), req)
- require.NoError(t, err)
- }
-
- _, err = s.store.ListRules(context.TODO(),
- storage.ListWithOptions(
- storage.NewResource(storage.DefaultNamespace, flag.Key),
- storage.ListWithQueryParamOptions[storage.ResourceRequest](storage.WithPageToken("Hello World"))))
- require.EqualError(t, err, "pageToken is not valid: \"Hello World\"")
-
- res, err := s.store.ListRules(context.TODO(), storage.ListWithOptions(storage.NewResource(storage.DefaultNamespace, flag.Key)))
- require.NoError(t, err)
-
- got := res.Results
- assert.NotEmpty(t, got)
-
- for _, rule := range got {
- assert.Equal(t, storage.DefaultNamespace, rule.NamespaceKey)
- assert.Len(t, rule.SegmentKeys, 2)
- assert.Contains(t, rule.SegmentKeys, firstSegment.Key)
- assert.Contains(t, rule.SegmentKeys, secondSegment.Key)
- assert.NotZero(t, rule.CreatedAt)
- assert.NotZero(t, rule.UpdatedAt)
- }
-}
-
-func (s *DBTestSuite) TestListRulesNamespace() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- variant, err := s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- NamespaceKey: s.namespace,
- FlagKey: flag.Key,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, variant)
-
- segment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, segment)
-
- reqs := []*flipt.CreateRuleRequest{
- {
- NamespaceKey: s.namespace,
- FlagKey: flag.Key,
- SegmentKey: segment.Key,
- Rank: 1,
- },
- {
- NamespaceKey: s.namespace,
- FlagKey: flag.Key,
- SegmentKey: segment.Key,
- Rank: 2,
- },
- }
-
- for _, req := range reqs {
- _, err := s.store.CreateRule(context.TODO(), req)
- require.NoError(t, err)
- }
-
- res, err := s.store.ListRules(context.TODO(), storage.ListWithOptions(storage.NewResource(s.namespace, flag.Key)))
- require.NoError(t, err)
-
- got := res.Results
- assert.NotEmpty(t, got)
-
- for _, rule := range got {
- assert.Equal(t, s.namespace, rule.NamespaceKey)
- assert.NotZero(t, rule.CreatedAt)
- assert.NotZero(t, rule.UpdatedAt)
- }
-}
-
-func (s *DBTestSuite) TestListRulesPagination_LimitOffset() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- variant, err := s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- FlagKey: flag.Key,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, variant)
-
- segment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, segment)
-
- reqs := []*flipt.CreateRuleRequest{
- {
- FlagKey: flag.Key,
- SegmentKey: segment.Key,
- Rank: 1,
- },
- {
- FlagKey: flag.Key,
- SegmentKey: segment.Key,
- Rank: 2,
- },
- }
-
- for _, req := range reqs {
- _, err := s.store.CreateRule(context.TODO(), req)
- require.NoError(t, err)
- }
-
- res, err := s.store.ListRules(context.TODO(),
- storage.ListWithOptions(
- storage.NewResource(storage.DefaultNamespace, flag.Key),
- storage.ListWithQueryParamOptions[storage.ResourceRequest](storage.WithLimit(1), storage.WithOffset(1)),
- ))
- require.NoError(t, err)
-
- got := res.Results
- assert.Len(t, got, 1)
-}
-
-func (s *DBTestSuite) TestListRulesPagination_LimitWithNextPage() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- variant, err := s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- FlagKey: flag.Key,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, variant)
-
- segment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, segment)
-
- reqs := []*flipt.CreateRuleRequest{
- {
- FlagKey: flag.Key,
- SegmentKey: segment.Key,
- Rank: 1,
- },
- {
- FlagKey: flag.Key,
- SegmentKey: segment.Key,
- Rank: 2,
- },
- }
-
- for _, req := range reqs {
- _, err := s.store.CreateRule(context.TODO(), req)
- require.NoError(t, err)
- }
-
- // TODO: the ordering (DESC) is required because the default ordering is ASC and we are not clearing the DB between tests
- opts := []storage.QueryOption{storage.WithOrder(storage.OrderDesc), storage.WithLimit(1)}
-
- res, err := s.store.ListRules(context.TODO(),
- storage.ListWithOptions(
- storage.NewResource(storage.DefaultNamespace, flag.Key),
- storage.ListWithQueryParamOptions[storage.ResourceRequest](opts...),
- ))
- require.NoError(t, err)
-
- got := res.Results
- assert.Len(t, got, 1)
- assert.Equal(t, reqs[1].Rank, got[0].Rank)
- assert.NotEmpty(t, res.NextPageToken)
-
- pTokenB, err := base64.StdEncoding.DecodeString(res.NextPageToken)
- require.NoError(t, err)
-
- pageToken := &common.PageToken{}
- err = json.Unmarshal(pTokenB, pageToken)
- require.NoError(t, err)
- assert.NotEmpty(t, pageToken.Key)
- assert.Equal(t, uint64(1), pageToken.Offset)
-
- opts = append(opts, storage.WithPageToken(res.NextPageToken))
-
- res, err = s.store.ListRules(context.TODO(),
- storage.ListWithOptions(
- storage.NewResource(storage.DefaultNamespace, flag.Key),
- storage.ListWithQueryParamOptions[storage.ResourceRequest](opts...),
- ))
- require.NoError(t, err)
-
- got = res.Results
- assert.Len(t, got, 1)
- assert.Equal(t, reqs[0].Rank, got[0].Rank)
-}
-
-func (s *DBTestSuite) TestListRulesPagination_FullWalk() {
- t := s.T()
-
- namespace := uuid.NewString()
-
- ctx := context.Background()
- _, err := s.store.CreateNamespace(ctx, &flipt.CreateNamespaceRequest{
- Key: namespace,
- })
- require.NoError(t, err)
-
- flag, err := s.store.CreateFlag(ctx, &flipt.CreateFlagRequest{
- NamespaceKey: namespace,
- Key: "flag-list-rules-full-walk",
- Name: "flag-list-rules-full-walk",
- })
- require.NoError(t, err)
-
- variant, err := s.store.CreateVariant(ctx, &flipt.CreateVariantRequest{
- NamespaceKey: namespace,
- FlagKey: flag.Key,
- Key: "variant-list-rules-full-walk",
- })
- require.NoError(t, err)
-
- segment, err := s.store.CreateSegment(ctx, &flipt.CreateSegmentRequest{
- NamespaceKey: namespace,
- Key: "segment-list-rules-full-walk",
- Name: "segment-list-rules-full-walk",
- })
- require.NoError(t, err)
-
- var (
- totalRules = 9
- pageSize = uint64(3)
- )
-
- for i := 0; i < totalRules; i++ {
- req := flipt.CreateRuleRequest{
- NamespaceKey: namespace,
- FlagKey: flag.Key,
- SegmentKey: segment.Key,
- Rank: int32(i + 1),
- }
-
- rule, err := s.store.CreateRule(ctx, &req)
- require.NoError(t, err)
-
- for i := 0; i < 2; i++ {
- if i > 0 && s.db.Driver == fliptsql.MySQL {
- // required for MySQL since it only s.stores timestamps to the second and not millisecond granularity
- time.Sleep(time.Second)
- }
-
- _, err := s.store.CreateDistribution(ctx, &flipt.CreateDistributionRequest{
- NamespaceKey: namespace,
- FlagKey: flag.Key,
- VariantId: variant.Id,
- RuleId: rule.Id,
- Rollout: 100.0,
- })
- require.NoError(t, err)
- }
- }
-
- resp, err := s.store.ListRules(ctx,
- storage.ListWithOptions(
- storage.NewResource(namespace, flag.Key),
- storage.ListWithQueryParamOptions[storage.ResourceRequest](
- storage.WithLimit(pageSize),
- ),
- ))
- require.NoError(t, err)
-
- found := resp.Results
- for token := resp.NextPageToken; token != ""; token = resp.NextPageToken {
- resp, err = s.store.ListRules(ctx,
- storage.ListWithOptions(
- storage.NewResource(namespace, flag.Key),
- storage.ListWithQueryParamOptions[storage.ResourceRequest](
- storage.WithLimit(pageSize),
- storage.WithPageToken(token),
- ),
- ),
- )
- require.NoError(t, err)
-
- found = append(found, resp.Results...)
- }
-
- require.Len(t, found, totalRules)
-
- for i := 0; i < totalRules; i++ {
- assert.Equal(t, namespace, found[i].NamespaceKey)
- assert.Equal(t, flag.Key, found[i].FlagKey)
- assert.Equal(t, segment.Key, found[i].SegmentKey)
- assert.Equal(t, int32(i+1), found[i].Rank)
-
- require.Len(t, found[i].Distributions, 2)
- assert.Equal(t, found[i].Id, found[i].Distributions[0].RuleId)
- assert.Equal(t, variant.Id, found[i].Distributions[0].VariantId)
- assert.InDelta(t, 100.0, found[i].Distributions[0].Rollout, 0)
-
- assert.Equal(t, found[i].Id, found[i].Distributions[1].RuleId)
- assert.Equal(t, variant.Id, found[i].Distributions[1].VariantId)
- assert.InDelta(t, 100.0, found[i].Distributions[1].Rollout, 0)
- }
-}
-
-func (s *DBTestSuite) TestCreateRuleAndDistribution() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- variant, err := s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- FlagKey: flag.Key,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, variant)
-
- segment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, segment)
-
- rule, err := s.store.CreateRule(context.TODO(), &flipt.CreateRuleRequest{
- FlagKey: flag.Key,
- SegmentKey: segment.Key,
- Rank: 1,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, rule)
-
- assert.NotZero(t, rule.Id)
- assert.Equal(t, flag.Key, rule.FlagKey)
- assert.Equal(t, segment.Key, rule.SegmentKey)
- assert.Equal(t, int32(1), rule.Rank)
- assert.NotZero(t, rule.CreatedAt)
- assert.Equal(t, rule.CreatedAt.Seconds, rule.UpdatedAt.Seconds)
-
- distribution, err := s.store.CreateDistribution(context.TODO(), &flipt.CreateDistributionRequest{
- FlagKey: flag.Key,
- RuleId: rule.Id,
- VariantId: variant.Id,
- Rollout: 100,
- })
-
- require.NoError(t, err)
- assert.NotZero(t, distribution.Id)
- assert.Equal(t, rule.Id, distribution.RuleId)
- assert.Equal(t, variant.Id, distribution.VariantId)
- assert.InDelta(t, 100, distribution.Rollout, 0)
- assert.NotZero(t, distribution.CreatedAt)
- assert.Equal(t, distribution.CreatedAt.Seconds, distribution.UpdatedAt.Seconds)
-}
-
-func (s *DBTestSuite) TestCreateRuleAndDistributionNamespace() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- variant, err := s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- NamespaceKey: s.namespace,
- FlagKey: flag.Key,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, variant)
-
- segment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, segment)
-
- rule, err := s.store.CreateRule(context.TODO(), &flipt.CreateRuleRequest{
- NamespaceKey: s.namespace,
- FlagKey: flag.Key,
- SegmentKey: segment.Key,
- SegmentOperator: flipt.SegmentOperator_AND_SEGMENT_OPERATOR,
- Rank: 1,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, rule)
-
- assert.Equal(t, s.namespace, rule.NamespaceKey)
- assert.NotZero(t, rule.Id)
- assert.Equal(t, flag.Key, rule.FlagKey)
- assert.Equal(t, segment.Key, rule.SegmentKey)
- assert.Equal(t, int32(1), rule.Rank)
- assert.NotZero(t, rule.CreatedAt)
- assert.Equal(t, rule.CreatedAt.Seconds, rule.UpdatedAt.Seconds)
- assert.Equal(t, flipt.SegmentOperator_OR_SEGMENT_OPERATOR, rule.SegmentOperator)
-
- distribution, err := s.store.CreateDistribution(context.TODO(), &flipt.CreateDistributionRequest{
- NamespaceKey: s.namespace,
- FlagKey: flag.Key,
- RuleId: rule.Id,
- VariantId: variant.Id,
- Rollout: 100,
- })
-
- require.NoError(t, err)
- assert.NotZero(t, distribution.Id)
- assert.Equal(t, rule.Id, distribution.RuleId)
- assert.Equal(t, variant.Id, distribution.VariantId)
- assert.InDelta(t, 100, distribution.Rollout, 0)
- assert.NotZero(t, distribution.CreatedAt)
- assert.Equal(t, distribution.CreatedAt.Seconds, distribution.UpdatedAt.Seconds)
-}
-
-func (s *DBTestSuite) TestCreateDistribution_NoRule() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- variant, err := s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- FlagKey: flag.Key,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, variant)
-
- _, err = s.store.CreateDistribution(context.TODO(), &flipt.CreateDistributionRequest{
- FlagKey: flag.Key,
- RuleId: "foo",
- VariantId: variant.Id,
- Rollout: 100,
- })
-
- assert.EqualError(t, err, fmt.Sprintf("variant %q, rule %q, flag %q in namespace %q not found", variant.Id, "foo", flag.Key, "default"))
-}
-
-func (s *DBTestSuite) TestCreateRule_FlagNotFound() {
- t := s.T()
-
- _, err := s.store.CreateRule(context.TODO(), &flipt.CreateRuleRequest{
- FlagKey: "foo",
- SegmentKey: "bar",
- Rank: 1,
- })
-
- assert.EqualError(t, err, "flag \"default/foo\" or segment \"default/bar\" not found")
-}
-
-func (s *DBTestSuite) TestCreateRuleNamespace_FlagNotFound() {
- t := s.T()
-
- _, err := s.store.CreateRule(context.TODO(), &flipt.CreateRuleRequest{
- NamespaceKey: s.namespace,
- FlagKey: "foo",
- SegmentKey: "bar",
- Rank: 1,
- })
-
- assert.EqualError(t, err, fmt.Sprintf("flag \"%s/foo\" or segment \"%s/bar\" not found", s.namespace, s.namespace))
-}
-
-func (s *DBTestSuite) TestCreateRule_SegmentNotFound() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- _, err = s.store.CreateRule(context.TODO(), &flipt.CreateRuleRequest{
- FlagKey: flag.Key,
- SegmentKey: "foo",
- Rank: 1,
- })
-
- assert.EqualError(t, err, "flag \"default/TestDBTestSuite/TestCreateRule_SegmentNotFound\" or segment \"default/foo\" not found")
-}
-
-func (s *DBTestSuite) TestCreateRuleNamespace_SegmentNotFound() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- _, err = s.store.CreateRule(context.TODO(), &flipt.CreateRuleRequest{
- NamespaceKey: s.namespace,
- FlagKey: flag.Key,
- SegmentKey: "foo",
- Rank: 1,
- })
-
- assert.EqualError(t, err, fmt.Sprintf("flag \"%s/%s\" or segment \"%s/foo\" not found", s.namespace, t.Name(), s.namespace))
-}
-
-func (s *DBTestSuite) TestUpdateRuleAndDistribution() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- variantOne, err := s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- FlagKey: flag.Key,
- Key: t.Name() + "foo",
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, variantOne)
-
- variantTwo, err := s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- FlagKey: flag.Key,
- Key: t.Name() + "bar",
- Name: "bar",
- Description: "baz",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, variantOne)
-
- segmentOne, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- Key: fmt.Sprintf("%s_one", t.Name()),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, segmentOne)
-
- rule, err := s.store.CreateRule(context.TODO(), &flipt.CreateRuleRequest{
- FlagKey: flag.Key,
- SegmentKey: segmentOne.Key,
- Rank: 1,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, rule)
-
- assert.NotZero(t, rule.Id)
- assert.Equal(t, flag.Key, rule.FlagKey)
- assert.Equal(t, segmentOne.Key, rule.SegmentKey)
- assert.Equal(t, int32(1), rule.Rank)
- assert.NotZero(t, rule.CreatedAt)
- assert.Equal(t, rule.CreatedAt.Seconds, rule.UpdatedAt.Seconds)
-
- distribution, err := s.store.CreateDistribution(context.TODO(), &flipt.CreateDistributionRequest{
- FlagKey: flag.Key,
- RuleId: rule.Id,
- VariantId: variantOne.Id,
- Rollout: 100,
- })
-
- require.NoError(t, err)
- assert.NotZero(t, distribution.Id)
- assert.Equal(t, rule.Id, distribution.RuleId)
- assert.Equal(t, variantOne.Id, distribution.VariantId)
- assert.InDelta(t, 100, distribution.Rollout, 0)
- assert.NotZero(t, distribution.CreatedAt)
- assert.Equal(t, distribution.CreatedAt.Seconds, distribution.UpdatedAt.Seconds)
-
- segmentTwo, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- Key: fmt.Sprintf("%s_two", t.Name()),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, segmentTwo)
-
- updatedRule, err := s.store.UpdateRule(context.TODO(), &flipt.UpdateRuleRequest{
- Id: rule.Id,
- FlagKey: flag.Key,
- SegmentKey: segmentTwo.Key,
- SegmentOperator: flipt.SegmentOperator_AND_SEGMENT_OPERATOR,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, updatedRule)
-
- assert.Equal(t, rule.Id, updatedRule.Id)
- assert.Equal(t, rule.FlagKey, updatedRule.FlagKey)
- assert.Equal(t, segmentTwo.Key, updatedRule.SegmentKey)
- assert.Equal(t, int32(1), updatedRule.Rank)
- assert.Equal(t, flipt.SegmentOperator_OR_SEGMENT_OPERATOR, updatedRule.SegmentOperator)
- // assert.Equal(t, rule.CreatedAt.Seconds, updatedRule.CreatedAt.Seconds)
- assert.NotZero(t, rule.UpdatedAt)
-
- t.Log("Update rule to references two segments.")
-
- updatedRule, err = s.store.UpdateRule(context.TODO(), &flipt.UpdateRuleRequest{
- Id: rule.Id,
- FlagKey: flag.Key,
- SegmentKeys: []string{segmentOne.Key, segmentTwo.Key},
- SegmentOperator: flipt.SegmentOperator_AND_SEGMENT_OPERATOR,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, updatedRule)
-
- assert.Equal(t, rule.Id, updatedRule.Id)
- assert.Equal(t, rule.FlagKey, updatedRule.FlagKey)
- assert.Contains(t, updatedRule.SegmentKeys, segmentOne.Key)
- assert.Contains(t, updatedRule.SegmentKeys, segmentTwo.Key)
- assert.Equal(t, flipt.SegmentOperator_AND_SEGMENT_OPERATOR, updatedRule.SegmentOperator)
- assert.Equal(t, int32(1), updatedRule.Rank)
- assert.NotZero(t, rule.UpdatedAt)
-
- // update distribution rollout
- updatedDistribution, err := s.store.UpdateDistribution(context.TODO(), &flipt.UpdateDistributionRequest{
- FlagKey: flag.Key,
- Id: distribution.Id,
- RuleId: rule.Id,
- VariantId: variantOne.Id,
- Rollout: 10,
- })
-
- require.NoError(t, err)
- assert.Equal(t, distribution.Id, updatedDistribution.Id)
- assert.Equal(t, rule.Id, updatedDistribution.RuleId)
- assert.Equal(t, variantOne.Id, updatedDistribution.VariantId)
- assert.InDelta(t, 10, updatedDistribution.Rollout, 0)
- assert.NotZero(t, rule.UpdatedAt)
-
- // update distribution variant
- updatedDistribution, err = s.store.UpdateDistribution(context.TODO(), &flipt.UpdateDistributionRequest{
- FlagKey: flag.Key,
- Id: distribution.Id,
- RuleId: rule.Id,
- VariantId: variantTwo.Id,
- Rollout: 10,
- })
-
- require.NoError(t, err)
- assert.Equal(t, distribution.Id, updatedDistribution.Id)
- assert.Equal(t, rule.Id, updatedDistribution.RuleId)
- assert.Equal(t, variantTwo.Id, updatedDistribution.VariantId)
- assert.InDelta(t, 10, updatedDistribution.Rollout, 0)
- assert.NotZero(t, rule.UpdatedAt)
-
- err = s.store.DeleteDistribution(context.TODO(), &flipt.DeleteDistributionRequest{
- Id: distribution.Id,
- RuleId: rule.Id,
- VariantId: variantOne.Id,
- })
- require.NoError(t, err)
-}
-
-func (s *DBTestSuite) TestUpdateRuleAndDistributionNamespace() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- variantOne, err := s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- NamespaceKey: s.namespace,
- FlagKey: flag.Key,
- Key: t.Name() + "foo",
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, variantOne)
-
- variantTwo, err := s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- NamespaceKey: s.namespace,
- FlagKey: flag.Key,
- Key: t.Name() + "bar",
- Name: "bar",
- Description: "baz",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, variantOne)
-
- segmentOne, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- NamespaceKey: s.namespace,
- Key: fmt.Sprintf("%s_one", t.Name()),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, segmentOne)
-
- rule, err := s.store.CreateRule(context.TODO(), &flipt.CreateRuleRequest{
- NamespaceKey: s.namespace,
- FlagKey: flag.Key,
- SegmentKey: segmentOne.Key,
- Rank: 1,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, rule)
-
- assert.Equal(t, s.namespace, rule.NamespaceKey)
- assert.NotZero(t, rule.Id)
- assert.Equal(t, flag.Key, rule.FlagKey)
- assert.Equal(t, segmentOne.Key, rule.SegmentKey)
- assert.Equal(t, int32(1), rule.Rank)
- assert.NotZero(t, rule.CreatedAt)
- assert.Equal(t, rule.CreatedAt.Seconds, rule.UpdatedAt.Seconds)
-
- distribution, err := s.store.CreateDistribution(context.TODO(), &flipt.CreateDistributionRequest{
- NamespaceKey: s.namespace,
- FlagKey: flag.Key,
- RuleId: rule.Id,
- VariantId: variantOne.Id,
- Rollout: 100,
- })
-
- require.NoError(t, err)
- assert.NotZero(t, distribution.Id)
- assert.Equal(t, rule.Id, distribution.RuleId)
- assert.Equal(t, variantOne.Id, distribution.VariantId)
- assert.InDelta(t, 100, distribution.Rollout, 0)
- assert.NotZero(t, distribution.CreatedAt)
- assert.Equal(t, distribution.CreatedAt.Seconds, distribution.UpdatedAt.Seconds)
-
- segmentTwo, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- NamespaceKey: s.namespace,
- Key: fmt.Sprintf("%s_two", t.Name()),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, segmentTwo)
-
- updatedRule, err := s.store.UpdateRule(context.TODO(), &flipt.UpdateRuleRequest{
- NamespaceKey: s.namespace,
- Id: rule.Id,
- FlagKey: flag.Key,
- SegmentKey: segmentTwo.Key,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, updatedRule)
-
- assert.Equal(t, s.namespace, rule.NamespaceKey)
- assert.Equal(t, rule.Id, updatedRule.Id)
- assert.Equal(t, rule.FlagKey, updatedRule.FlagKey)
- assert.Equal(t, segmentTwo.Key, updatedRule.SegmentKey)
- assert.Equal(t, int32(1), updatedRule.Rank)
- // assert.Equal(t, rule.CreatedAt.Seconds, updatedRule.CreatedAt.Seconds)
- assert.NotZero(t, rule.UpdatedAt)
-
- // update distribution rollout
- updatedDistribution, err := s.store.UpdateDistribution(context.TODO(), &flipt.UpdateDistributionRequest{
- NamespaceKey: s.namespace,
- FlagKey: flag.Key,
- Id: distribution.Id,
- RuleId: rule.Id,
- VariantId: variantOne.Id,
- Rollout: 10,
- })
-
- require.NoError(t, err)
- assert.Equal(t, distribution.Id, updatedDistribution.Id)
- assert.Equal(t, rule.Id, updatedDistribution.RuleId)
- assert.Equal(t, variantOne.Id, updatedDistribution.VariantId)
- assert.InDelta(t, 10, updatedDistribution.Rollout, 0)
- // assert.Equal(t, distribution.CreatedAt.Seconds, updatedDistribution.CreatedAt.Seconds)
- assert.NotZero(t, rule.UpdatedAt)
-
- // update distribution variant
- updatedDistribution, err = s.store.UpdateDistribution(context.TODO(), &flipt.UpdateDistributionRequest{
- NamespaceKey: s.namespace,
- FlagKey: flag.Key,
- Id: distribution.Id,
- RuleId: rule.Id,
- VariantId: variantTwo.Id,
- Rollout: 10,
- })
-
- require.NoError(t, err)
- assert.Equal(t, distribution.Id, updatedDistribution.Id)
- assert.Equal(t, rule.Id, updatedDistribution.RuleId)
- assert.Equal(t, variantTwo.Id, updatedDistribution.VariantId)
- assert.InDelta(t, 10, updatedDistribution.Rollout, 0)
- // assert.Equal(t, distribution.CreatedAt.Seconds, updatedDistribution.CreatedAt.Seconds)
- assert.NotZero(t, rule.UpdatedAt)
-
- err = s.store.DeleteDistribution(context.TODO(), &flipt.DeleteDistributionRequest{
- Id: distribution.Id,
- RuleId: rule.Id,
- VariantId: variantOne.Id,
- })
- require.NoError(t, err)
-}
-
-func (s *DBTestSuite) TestUpdateRule_NotFound() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- segment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, segment)
-
- _, err = s.store.UpdateRule(context.TODO(), &flipt.UpdateRuleRequest{
- Id: "foo",
- FlagKey: flag.Key,
- SegmentKey: segment.Key,
- })
-
- assert.EqualError(t, err, "rule \"default/foo\" not found")
-}
-
-func (s *DBTestSuite) TestUpdateRuleNamespace_NotFound() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- segment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, segment)
-
- _, err = s.store.UpdateRule(context.TODO(), &flipt.UpdateRuleRequest{
- NamespaceKey: s.namespace,
- Id: "foo",
- FlagKey: flag.Key,
- SegmentKey: segment.Key,
- })
-
- assert.EqualError(t, err, fmt.Sprintf("rule \"%s/foo\" not found", s.namespace))
-}
-
-func (s *DBTestSuite) TestDeleteRule() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- variant, err := s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- FlagKey: flag.Key,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, variant)
-
- segment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, segment)
-
- var rules []*flipt.Rule
-
- // create 3 rules
- for i := 0; i < 3; i++ {
- rule, err := s.store.CreateRule(context.TODO(), &flipt.CreateRuleRequest{
- FlagKey: flag.Key,
- SegmentKey: segment.Key,
- Rank: int32(i + 1),
- })
-
- require.NoError(t, err)
- assert.NotNil(t, rule)
- rules = append(rules, rule)
- }
-
- // delete second rule
- err = s.store.DeleteRule(context.TODO(), &flipt.DeleteRuleRequest{
- FlagKey: flag.Key,
- Id: rules[1].Id,
- })
-
- require.NoError(t, err)
-
- res, err := s.store.ListRules(context.TODO(), storage.ListWithOptions(storage.NewResource(storage.DefaultNamespace, flag.Key)))
- // ensure rules are in correct order
- require.NoError(t, err)
-
- got := res.Results
- assert.NotNil(t, got)
- assert.Len(t, got, 2)
- assert.Equal(t, rules[0].Id, got[0].Id)
- assert.Equal(t, int32(1), got[0].Rank)
- assert.Equal(t, storage.DefaultNamespace, got[0].NamespaceKey)
- assert.Equal(t, rules[2].Id, got[1].Id)
- assert.Equal(t, int32(2), got[1].Rank)
- assert.Equal(t, storage.DefaultNamespace, got[1].NamespaceKey)
-}
-
-func (s *DBTestSuite) TestDeleteRuleNamespace() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- variant, err := s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- NamespaceKey: s.namespace,
- FlagKey: flag.Key,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, variant)
-
- segment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, segment)
-
- var rules []*flipt.Rule
-
- // create 3 rules
- for i := 0; i < 3; i++ {
- rule, err := s.store.CreateRule(context.TODO(), &flipt.CreateRuleRequest{
- NamespaceKey: s.namespace,
- FlagKey: flag.Key,
- SegmentKey: segment.Key,
- Rank: int32(i + 1),
- })
-
- require.NoError(t, err)
- assert.NotNil(t, rule)
- rules = append(rules, rule)
- }
-
- // delete second rule
- err = s.store.DeleteRule(context.TODO(), &flipt.DeleteRuleRequest{
- NamespaceKey: s.namespace,
- FlagKey: flag.Key,
- Id: rules[1].Id,
- })
-
- require.NoError(t, err)
-
- res, err := s.store.ListRules(context.TODO(), storage.ListWithOptions(storage.NewResource(s.namespace, flag.Key)))
- // ensure rules are in correct order
- require.NoError(t, err)
-
- got := res.Results
- assert.NotNil(t, got)
- assert.Len(t, got, 2)
- assert.Equal(t, rules[0].Id, got[0].Id)
- assert.Equal(t, int32(1), got[0].Rank)
- assert.Equal(t, s.namespace, got[0].NamespaceKey)
- assert.Equal(t, rules[2].Id, got[1].Id)
- assert.Equal(t, int32(2), got[1].Rank)
- assert.Equal(t, s.namespace, got[1].NamespaceKey)
-}
-
-func (s *DBTestSuite) TestDeleteRule_NotFound() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- err = s.store.DeleteRule(context.TODO(), &flipt.DeleteRuleRequest{
- Id: "foo",
- FlagKey: flag.Key,
- })
-
- require.NoError(t, err)
-}
-
-func (s *DBTestSuite) TestDeleteRuleNamespace_NotFound() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- err = s.store.DeleteRule(context.TODO(), &flipt.DeleteRuleRequest{
- NamespaceKey: s.namespace,
- Id: "foo",
- FlagKey: flag.Key,
- })
-
- require.NoError(t, err)
-}
-
-func (s *DBTestSuite) TestOrderRules() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- variant, err := s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- FlagKey: flag.Key,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, variant)
-
- segment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, segment)
-
- var rules []*flipt.Rule
-
- // create 3 rules
- for i := 0; i < 3; i++ {
- rule, err := s.store.CreateRule(context.TODO(), &flipt.CreateRuleRequest{
- FlagKey: flag.Key,
- SegmentKey: segment.Key,
- Rank: int32(i + 1),
- })
-
- require.NoError(t, err)
- assert.NotNil(t, rule)
- rules = append(rules, rule)
- }
-
- // order rules in reverse order
- sort.Slice(rules, func(i, j int) bool { return rules[i].Rank > rules[j].Rank })
-
- var ruleIds []string
- for _, rule := range rules {
- ruleIds = append(ruleIds, rule.Id)
- }
-
- // re-order rules
- err = s.store.OrderRules(context.TODO(), &flipt.OrderRulesRequest{
- FlagKey: flag.Key,
- RuleIds: ruleIds,
- })
-
- require.NoError(t, err)
-
- res, err := s.store.ListRules(context.TODO(), storage.ListWithOptions(storage.NewResource(storage.DefaultNamespace, flag.Key)))
-
- // ensure rules are in correct order
- require.NoError(t, err)
- got := res.Results
- assert.NotNil(t, got)
- assert.Len(t, got, 3)
-
- assert.Equal(t, rules[0].Id, got[0].Id)
- assert.Equal(t, int32(1), got[0].Rank)
- assert.Equal(t, storage.DefaultNamespace, got[0].NamespaceKey)
-
- assert.Equal(t, rules[1].Id, got[1].Id)
- assert.Equal(t, int32(2), got[1].Rank)
- assert.Equal(t, storage.DefaultNamespace, got[1].NamespaceKey)
-
- assert.Equal(t, rules[2].Id, got[2].Id)
- assert.Equal(t, int32(3), got[2].Rank)
- assert.Equal(t, storage.DefaultNamespace, got[2].NamespaceKey)
-}
-
-func (s *DBTestSuite) TestOrderRulesNamespace() {
- t := s.T()
-
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- variant, err := s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- NamespaceKey: s.namespace,
- FlagKey: flag.Key,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, variant)
-
- segment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, segment)
-
- var rules []*flipt.Rule
-
- // create 3 rules
- for i := 0; i < 3; i++ {
- rule, err := s.store.CreateRule(context.TODO(), &flipt.CreateRuleRequest{
- NamespaceKey: s.namespace,
- FlagKey: flag.Key,
- SegmentKey: segment.Key,
- Rank: int32(i + 1),
- })
-
- require.NoError(t, err)
- assert.NotNil(t, rule)
- rules = append(rules, rule)
- }
-
- // order rules in reverse order
- sort.Slice(rules, func(i, j int) bool { return rules[i].Rank > rules[j].Rank })
-
- var ruleIds []string
- for _, rule := range rules {
- ruleIds = append(ruleIds, rule.Id)
- }
-
- // re-order rules
- err = s.store.OrderRules(context.TODO(), &flipt.OrderRulesRequest{
- NamespaceKey: s.namespace,
- FlagKey: flag.Key,
- RuleIds: ruleIds,
- })
-
- require.NoError(t, err)
-
- res, err := s.store.ListRules(context.TODO(), storage.ListWithOptions(storage.NewResource(s.namespace, flag.Key)))
-
- // ensure rules are in correct order
- require.NoError(t, err)
- got := res.Results
- assert.NotNil(t, got)
- assert.Len(t, got, 3)
-
- assert.Equal(t, rules[0].Id, got[0].Id)
- assert.Equal(t, int32(1), got[0].Rank)
- assert.Equal(t, s.namespace, got[0].NamespaceKey)
-
- assert.Equal(t, rules[1].Id, got[1].Id)
- assert.Equal(t, int32(2), got[1].Rank)
- assert.Equal(t, s.namespace, got[1].NamespaceKey)
-
- assert.Equal(t, rules[2].Id, got[2].Id)
- assert.Equal(t, int32(3), got[2].Rank)
- assert.Equal(t, s.namespace, got[2].NamespaceKey)
-}
diff --git a/internal/storage/sql/segment_test.go b/internal/storage/sql/segment_test.go
deleted file mode 100644
index 497ed6610f..0000000000
--- a/internal/storage/sql/segment_test.go
+++ /dev/null
@@ -1,1369 +0,0 @@
-package sql_test
-
-import (
- "context"
- "encoding/base64"
- "encoding/json"
- "fmt"
- "testing"
- "time"
-
- "go.flipt.io/flipt/internal/storage"
- fliptsql "go.flipt.io/flipt/internal/storage/sql"
- "go.flipt.io/flipt/internal/storage/sql/common"
- flipt "go.flipt.io/flipt/rpc/flipt"
-
- "github.com/google/uuid"
- "github.com/stretchr/testify/assert"
- "github.com/stretchr/testify/require"
-)
-
-func (s *DBTestSuite) TestGetSegment() {
- t := s.T()
-
- segment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- MatchType: flipt.MatchType_ALL_MATCH_TYPE,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, segment)
-
- got, err := s.store.GetSegment(context.TODO(), storage.NewResource(storage.DefaultNamespace, segment.Key))
-
- require.NoError(t, err)
- assert.NotNil(t, got)
-
- assert.Equal(t, storage.DefaultNamespace, got.NamespaceKey)
- assert.Equal(t, segment.Key, got.Key)
- assert.Equal(t, segment.Name, got.Name)
- assert.Equal(t, segment.Description, got.Description)
- assert.NotZero(t, got.CreatedAt)
- assert.NotZero(t, got.UpdatedAt)
- assert.Equal(t, segment.MatchType, got.MatchType)
-}
-
-func (s *DBTestSuite) TestGetSegmentNamespace() {
- t := s.T()
-
- segment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- MatchType: flipt.MatchType_ALL_MATCH_TYPE,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, segment)
-
- got, err := s.store.GetSegment(context.TODO(), storage.NewResource(s.namespace, segment.Key))
-
- require.NoError(t, err)
- assert.NotNil(t, got)
-
- assert.Equal(t, s.namespace, got.NamespaceKey)
- assert.Equal(t, segment.Key, got.Key)
- assert.Equal(t, segment.Name, got.Name)
- assert.Equal(t, segment.Description, got.Description)
- assert.NotZero(t, got.CreatedAt)
- assert.NotZero(t, got.UpdatedAt)
- assert.Equal(t, segment.MatchType, got.MatchType)
-}
-
-func (s *DBTestSuite) TestGetSegment_NotFound() {
- t := s.T()
-
- _, err := s.store.GetSegment(context.TODO(), storage.NewResource(storage.DefaultNamespace, "foo"))
- assert.EqualError(t, err, "segment \"default/foo\" not found")
-}
-
-func (s *DBTestSuite) TestGetSegmentNamespace_NotFound() {
- t := s.T()
-
- _, err := s.store.GetSegment(context.TODO(), storage.NewResource(s.namespace, "foo"))
- assert.EqualError(t, err, fmt.Sprintf("segment \"%s/foo\" not found", s.namespace))
-}
-
-func (s *DBTestSuite) TestGetSegment_WithConstraint() {
- t := s.T()
-
- segment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- MatchType: flipt.MatchType_ALL_MATCH_TYPE,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, segment)
-
- // ensure we support older versions of Flipt where constraints have NULL descriptions.
- _, err = s.db.DB.Exec(fmt.Sprintf(`INSERT INTO constraints (id, segment_key, type, property, operator, value) VALUES ('%s', '%s', 1, 'foo', 'eq', 'bar');`,
- uuid.NewString(),
- segment.Key))
-
- require.NoError(t, err)
-
- got, err := s.store.GetSegment(context.TODO(), storage.NewResource(storage.DefaultNamespace, segment.Key))
-
- require.NoError(t, err)
- assert.NotNil(t, got)
-
- assert.Equal(t, storage.DefaultNamespace, got.NamespaceKey)
- assert.Equal(t, segment.Key, got.Key)
- assert.Equal(t, segment.Name, got.Name)
- assert.Equal(t, segment.Description, got.Description)
- assert.NotZero(t, got.CreatedAt)
- assert.NotZero(t, got.UpdatedAt)
- assert.Equal(t, segment.MatchType, got.MatchType)
-
- require.Len(t, got.Constraints, 1)
-}
-
-func (s *DBTestSuite) TestListSegments() {
- t := s.T()
-
- reqs := []*flipt.CreateSegmentRequest{
- {
- Key: uuid.NewString(),
- Name: "foo",
- Description: "bar",
- },
- {
- Key: uuid.NewString(),
- Name: "foo",
- Description: "bar",
- },
- }
-
- for _, req := range reqs {
- _, err := s.store.CreateSegment(context.TODO(), req)
- require.NoError(t, err)
- }
-
- _, err := s.store.ListSegments(context.TODO(), storage.ListWithOptions(storage.NewNamespace(storage.DefaultNamespace), storage.ListWithQueryParamOptions[storage.NamespaceRequest](storage.WithPageToken("Hello World"))))
- require.EqualError(t, err, "pageToken is not valid: \"Hello World\"")
-
- res, err := s.store.ListSegments(context.TODO(), storage.ListWithOptions(storage.NewNamespace(storage.DefaultNamespace)))
- require.NoError(t, err)
- got := res.Results
- assert.NotEmpty(t, got)
-
- for _, segment := range got {
- assert.Equal(t, storage.DefaultNamespace, segment.NamespaceKey)
- assert.NotZero(t, segment.CreatedAt)
- assert.NotZero(t, segment.UpdatedAt)
- }
-}
-
-func (s *DBTestSuite) TestListSegmentsNamespace() {
- t := s.T()
-
- reqs := []*flipt.CreateSegmentRequest{
- {
- NamespaceKey: s.namespace,
- Key: uuid.NewString(),
- Name: "foo",
- Description: "bar",
- },
- {
- NamespaceKey: s.namespace,
- Key: uuid.NewString(),
- Name: "foo",
- Description: "bar",
- },
- }
-
- for _, req := range reqs {
- _, err := s.store.CreateSegment(context.TODO(), req)
- require.NoError(t, err)
- }
-
- res, err := s.store.ListSegments(context.TODO(), storage.ListWithOptions(storage.NewNamespace(s.namespace)))
- require.NoError(t, err)
- got := res.Results
- assert.NotEmpty(t, got)
-
- for _, segment := range got {
- assert.Equal(t, s.namespace, segment.NamespaceKey)
- assert.NotZero(t, segment.CreatedAt)
- assert.NotZero(t, segment.UpdatedAt)
- }
-}
-
-func (s *DBTestSuite) TestListSegmentsPagination_LimitOffset() {
- t := s.T()
-
- reqs := []*flipt.CreateSegmentRequest{
- {
- Key: uuid.NewString(),
- Name: "foo",
- Description: "bar",
- },
- {
- Key: uuid.NewString(),
- Name: "foo",
- Description: "bar",
- },
- {
- Key: uuid.NewString(),
- Name: "foo",
- Description: "bar",
- },
- }
-
- for _, req := range reqs {
- if s.db.Driver == fliptsql.MySQL {
- // required for MySQL since it only s.stores timestamps to the second and not millisecond granularity
- time.Sleep(time.Second)
- }
- _, err := s.store.CreateSegment(context.TODO(), req)
- require.NoError(t, err)
- }
-
- oldest, middle, newest := reqs[0], reqs[1], reqs[2]
-
- // TODO: the ordering (DESC) is required because the default ordering is ASC and we are not clearing the DB between tests
- // get middle segment
- res, err := s.store.ListSegments(context.TODO(), storage.ListWithOptions(storage.NewNamespace(storage.DefaultNamespace), storage.ListWithQueryParamOptions[storage.NamespaceRequest](storage.WithOrder(storage.OrderDesc), storage.WithLimit(1), storage.WithOffset(1))))
- require.NoError(t, err)
-
- got := res.Results
- assert.Len(t, got, 1)
-
- assert.Equal(t, middle.Key, got[0].Key)
-
- // get first (newest) segment
- res, err = s.store.ListSegments(context.TODO(), storage.ListWithOptions(storage.NewNamespace(storage.DefaultNamespace), storage.ListWithQueryParamOptions[storage.NamespaceRequest](storage.WithOrder(storage.OrderDesc), storage.WithLimit(1))))
- require.NoError(t, err)
-
- got = res.Results
- assert.Len(t, got, 1)
-
- assert.Equal(t, newest.Key, got[0].Key)
-
- // get last (oldest) segment
- res, err = s.store.ListSegments(context.TODO(), storage.ListWithOptions(storage.NewNamespace(storage.DefaultNamespace), storage.ListWithQueryParamOptions[storage.NamespaceRequest](storage.WithOrder(storage.OrderDesc), storage.WithLimit(1), storage.WithOffset(2))))
- require.NoError(t, err)
-
- got = res.Results
- assert.Len(t, got, 1)
-
- assert.Equal(t, oldest.Key, got[0].Key)
-
- // get all segments
- res, err = s.store.ListSegments(context.TODO(), storage.ListWithOptions(storage.NewNamespace(storage.DefaultNamespace), storage.ListWithQueryParamOptions[storage.NamespaceRequest](storage.WithOrder(storage.OrderDesc))))
- require.NoError(t, err)
-
- got = res.Results
-
- assert.Equal(t, newest.Key, got[0].Key)
- assert.Equal(t, middle.Key, got[1].Key)
- assert.Equal(t, oldest.Key, got[2].Key)
-}
-
-func (s *DBTestSuite) TestListSegmentsPagination_LimitWithNextPage() {
- t := s.T()
-
- reqs := []*flipt.CreateSegmentRequest{
- {
- Key: uuid.NewString(),
- Name: "foo",
- Description: "bar",
- },
- {
- Key: uuid.NewString(),
- Name: "foo",
- Description: "bar",
- },
- {
- Key: uuid.NewString(),
- Name: "foo",
- Description: "bar",
- },
- }
-
- oldest, middle, newest := reqs[0], reqs[1], reqs[2]
-
- for _, req := range reqs {
- if s.db.Driver == fliptsql.MySQL {
- // required for MySQL since it only s.stores timestamps to the second and not millisecond granularity
- time.Sleep(time.Second)
- }
- _, err := s.store.CreateSegment(context.TODO(), req)
- require.NoError(t, err)
- }
-
- // TODO: the ordering (DESC) is required because the default ordering is ASC and we are not clearing the DB between tests
- // get newest segment
- opts := []storage.QueryOption{storage.WithOrder(storage.OrderDesc), storage.WithLimit(1)}
-
- res, err := s.store.ListSegments(context.TODO(), storage.ListWithOptions(storage.NewNamespace(storage.DefaultNamespace), storage.ListWithQueryParamOptions[storage.NamespaceRequest](opts...)))
- require.NoError(t, err)
-
- got := res.Results
- assert.Len(t, got, 1)
- assert.Equal(t, newest.Key, got[0].Key)
- assert.NotEmpty(t, res.NextPageToken)
-
- pTokenB, err := base64.StdEncoding.DecodeString(res.NextPageToken)
- require.NoError(t, err)
-
- pageToken := &common.PageToken{}
- err = json.Unmarshal(pTokenB, pageToken)
- require.NoError(t, err)
- // next page should be the middle segment
- assert.Equal(t, middle.Key, pageToken.Key)
- assert.NotZero(t, pageToken.Offset)
-
- opts = append(opts, storage.WithPageToken(res.NextPageToken))
-
- // get middle segment
- res, err = s.store.ListSegments(context.TODO(), storage.ListWithOptions(storage.NewNamespace(storage.DefaultNamespace), storage.ListWithQueryParamOptions[storage.NamespaceRequest](opts...)))
- require.NoError(t, err)
-
- got = res.Results
- assert.Len(t, got, 1)
- assert.Equal(t, middle.Key, got[0].Key)
-
- pTokenB, err = base64.StdEncoding.DecodeString(res.NextPageToken)
- require.NoError(t, err)
-
- err = json.Unmarshal(pTokenB, pageToken)
- require.NoError(t, err)
- // next page should be the oldest segment
- assert.Equal(t, oldest.Key, pageToken.Key)
- assert.NotZero(t, pageToken.Offset)
-
- opts = []storage.QueryOption{storage.WithOrder(storage.OrderDesc), storage.WithLimit(1), storage.WithPageToken(res.NextPageToken)}
-
- // get oldest segment
- res, err = s.store.ListSegments(context.TODO(), storage.ListWithOptions(storage.NewNamespace(storage.DefaultNamespace), storage.ListWithQueryParamOptions[storage.NamespaceRequest](opts...)))
- require.NoError(t, err)
-
- got = res.Results
- assert.Len(t, got, 1)
- assert.Equal(t, oldest.Key, got[0].Key)
-
- opts = []storage.QueryOption{storage.WithOrder(storage.OrderDesc), storage.WithLimit(3)}
- // get all segments
- res, err = s.store.ListSegments(context.TODO(), storage.ListWithOptions(storage.NewNamespace(storage.DefaultNamespace), storage.ListWithQueryParamOptions[storage.NamespaceRequest](opts...)))
- require.NoError(t, err)
-
- got = res.Results
- assert.Len(t, got, 3)
- assert.Equal(t, newest.Key, got[0].Key)
- assert.Equal(t, middle.Key, got[1].Key)
- assert.Equal(t, oldest.Key, got[2].Key)
-}
-
-func (s *DBTestSuite) TestListSegmentsPagination_FullWalk() {
- t := s.T()
-
- namespace := uuid.NewString()
-
- ctx := context.Background()
- _, err := s.store.CreateNamespace(ctx, &flipt.CreateNamespaceRequest{
- Key: namespace,
- })
- require.NoError(t, err)
-
- var (
- totalSegments = 9
- pageSize = uint64(3)
- )
-
- for i := 0; i < totalSegments; i++ {
- req := flipt.CreateSegmentRequest{
- NamespaceKey: namespace,
- Key: fmt.Sprintf("segment_%03d", i),
- Name: "foo",
- Description: "bar",
- }
-
- _, err := s.store.CreateSegment(ctx, &req)
- require.NoError(t, err)
-
- for i := 0; i < 2; i++ {
- if i > 0 && s.db.Driver == fliptsql.MySQL {
- // required for MySQL since it only s.stores timestamps to the second and not millisecond granularity
- time.Sleep(time.Second)
- }
-
- _, err := s.store.CreateConstraint(ctx, &flipt.CreateConstraintRequest{
- NamespaceKey: namespace,
- SegmentKey: req.Key,
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "foo",
- Operator: flipt.OpEQ,
- Value: "bar",
- })
- require.NoError(t, err)
- }
- }
-
- req := storage.ListWithOptions(
- storage.NewNamespace(namespace),
- storage.ListWithQueryParamOptions[storage.NamespaceRequest](storage.WithLimit(pageSize)),
- )
- resp, err := s.store.ListSegments(ctx, req)
-
- require.NoError(t, err)
-
- found := resp.Results
- for token := resp.NextPageToken; token != ""; token = resp.NextPageToken {
- req.QueryParams.PageToken = token
- resp, err = s.store.ListSegments(ctx, req)
- require.NoError(t, err)
-
- found = append(found, resp.Results...)
- }
-
- require.Len(t, found, totalSegments)
-
- for i := 0; i < totalSegments; i++ {
- assert.Equal(t, namespace, found[i].NamespaceKey)
-
- expectedSegment := fmt.Sprintf("segment_%03d", i)
- assert.Equal(t, expectedSegment, found[i].Key)
- assert.Equal(t, "foo", found[i].Name)
- assert.Equal(t, "bar", found[i].Description)
-
- require.Len(t, found[i].Constraints, 2)
- assert.Equal(t, namespace, found[i].Constraints[0].NamespaceKey)
- assert.Equal(t, expectedSegment, found[i].Constraints[0].SegmentKey)
- assert.Equal(t, flipt.ComparisonType_STRING_COMPARISON_TYPE, found[i].Constraints[0].Type)
- assert.Equal(t, "foo", found[i].Constraints[0].Property)
- assert.Equal(t, flipt.OpEQ, found[i].Constraints[0].Operator)
- assert.Equal(t, "bar", found[i].Constraints[0].Value)
-
- assert.Equal(t, namespace, found[i].Constraints[1].NamespaceKey)
- assert.Equal(t, expectedSegment, found[i].Constraints[1].SegmentKey)
- assert.Equal(t, flipt.ComparisonType_STRING_COMPARISON_TYPE, found[i].Constraints[1].Type)
- assert.Equal(t, "foo", found[i].Constraints[1].Property)
- assert.Equal(t, flipt.OpEQ, found[i].Constraints[1].Operator)
- assert.Equal(t, "bar", found[i].Constraints[1].Value)
- }
-}
-
-func (s *DBTestSuite) TestCreateSegment() {
- t := s.T()
-
- segment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- MatchType: flipt.MatchType_ANY_MATCH_TYPE,
- })
-
- require.NoError(t, err)
-
- assert.Equal(t, t.Name(), segment.Key)
- assert.Equal(t, "foo", segment.Name)
- assert.Equal(t, "bar", segment.Description)
- assert.Equal(t, flipt.MatchType_ANY_MATCH_TYPE, segment.MatchType)
- assert.NotZero(t, segment.CreatedAt)
- assert.Equal(t, segment.CreatedAt.Seconds, segment.UpdatedAt.Seconds)
-}
-
-func (s *DBTestSuite) TestCreateSegmentNamespace() {
- t := s.T()
-
- segment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- MatchType: flipt.MatchType_ANY_MATCH_TYPE,
- })
-
- require.NoError(t, err)
-
- assert.Equal(t, s.namespace, segment.NamespaceKey)
- assert.Equal(t, t.Name(), segment.Key)
- assert.Equal(t, "foo", segment.Name)
- assert.Equal(t, "bar", segment.Description)
- assert.Equal(t, flipt.MatchType_ANY_MATCH_TYPE, segment.MatchType)
- assert.NotZero(t, segment.CreatedAt)
- assert.Equal(t, segment.CreatedAt.Seconds, segment.UpdatedAt.Seconds)
-}
-
-func (s *DBTestSuite) TestCreateSegment_DuplicateKey() {
- t := s.T()
-
- _, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
-
- _, err = s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- assert.EqualError(t, err, "segment \"default/TestDBTestSuite/TestCreateSegment_DuplicateKey\" is not unique")
-}
-
-func (s *DBTestSuite) TestCreateSegmentNamespace_DuplicateKey() {
- t := s.T()
-
- _, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
-
- _, err = s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- assert.EqualError(t, err, fmt.Sprintf("segment \"%s/%s\" is not unique", s.namespace, t.Name()))
-}
-
-func (s *DBTestSuite) TestUpdateSegment() {
- t := s.T()
-
- segment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- MatchType: flipt.MatchType_ALL_MATCH_TYPE,
- })
-
- require.NoError(t, err)
-
- assert.Equal(t, storage.DefaultNamespace, segment.NamespaceKey)
- assert.Equal(t, t.Name(), segment.Key)
- assert.Equal(t, "foo", segment.Name)
- assert.Equal(t, "bar", segment.Description)
- assert.Equal(t, flipt.MatchType_ALL_MATCH_TYPE, segment.MatchType)
- assert.NotZero(t, segment.CreatedAt)
- assert.Equal(t, segment.CreatedAt.Seconds, segment.UpdatedAt.Seconds)
-
- updated, err := s.store.UpdateSegment(context.TODO(), &flipt.UpdateSegmentRequest{
- Key: segment.Key,
- Name: segment.Name,
- Description: "foobar",
- MatchType: flipt.MatchType_ANY_MATCH_TYPE,
- })
-
- require.NoError(t, err)
-
- assert.Equal(t, storage.DefaultNamespace, updated.NamespaceKey)
- assert.Equal(t, segment.Key, updated.Key)
- assert.Equal(t, segment.Name, updated.Name)
- assert.Equal(t, "foobar", updated.Description)
- assert.Equal(t, flipt.MatchType_ANY_MATCH_TYPE, updated.MatchType)
- assert.NotZero(t, updated.CreatedAt)
- assert.NotZero(t, updated.UpdatedAt)
-}
-
-func (s *DBTestSuite) TestUpdateSegmentNamespace() {
- t := s.T()
-
- segment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- MatchType: flipt.MatchType_ALL_MATCH_TYPE,
- })
-
- require.NoError(t, err)
-
- assert.Equal(t, s.namespace, segment.NamespaceKey)
- assert.Equal(t, t.Name(), segment.Key)
- assert.Equal(t, "foo", segment.Name)
- assert.Equal(t, "bar", segment.Description)
- assert.Equal(t, flipt.MatchType_ALL_MATCH_TYPE, segment.MatchType)
- assert.NotZero(t, segment.CreatedAt)
- assert.Equal(t, segment.CreatedAt.Seconds, segment.UpdatedAt.Seconds)
-
- updated, err := s.store.UpdateSegment(context.TODO(), &flipt.UpdateSegmentRequest{
- NamespaceKey: s.namespace,
- Key: segment.Key,
- Name: segment.Name,
- Description: "foobar",
- MatchType: flipt.MatchType_ANY_MATCH_TYPE,
- })
-
- require.NoError(t, err)
-
- assert.Equal(t, s.namespace, updated.NamespaceKey)
- assert.Equal(t, segment.Key, updated.Key)
- assert.Equal(t, segment.Name, updated.Name)
- assert.Equal(t, "foobar", updated.Description)
- assert.Equal(t, flipt.MatchType_ANY_MATCH_TYPE, updated.MatchType)
- assert.NotZero(t, updated.CreatedAt)
- assert.NotZero(t, updated.UpdatedAt)
-}
-
-func (s *DBTestSuite) TestUpdateSegment_NotFound() {
- t := s.T()
-
- _, err := s.store.UpdateSegment(context.TODO(), &flipt.UpdateSegmentRequest{
- Key: "foo",
- Name: "foo",
- Description: "bar",
- })
-
- assert.EqualError(t, err, "segment \"default/foo\" not found")
-}
-
-func (s *DBTestSuite) TestUpdateSegmentNamespace_NotFound() {
- t := s.T()
-
- _, err := s.store.UpdateSegment(context.TODO(), &flipt.UpdateSegmentRequest{
- NamespaceKey: s.namespace,
- Key: "foo",
- Name: "foo",
- Description: "bar",
- })
-
- assert.EqualError(t, err, fmt.Sprintf("segment \"%s/foo\" not found", s.namespace))
-}
-
-func (s *DBTestSuite) TestDeleteSegment() {
- t := s.T()
-
- segment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, segment)
-
- err = s.store.DeleteSegment(context.TODO(), &flipt.DeleteSegmentRequest{Key: segment.Key})
- require.NoError(t, err)
-}
-
-func (s *DBTestSuite) TestDeleteSegmentNamespace() {
- t := s.T()
-
- segment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, segment)
-
- err = s.store.DeleteSegment(context.TODO(), &flipt.DeleteSegmentRequest{
- NamespaceKey: s.namespace,
- Key: segment.Key,
- })
- require.NoError(t, err)
-}
-
-func (s *DBTestSuite) TestDeleteSegment_ExistingRule() {
- t := s.T()
-
- tests := []struct {
- Type flipt.FlagType
- add func(t *testing.T, flagKey, segmentKey string)
- remove func(t *testing.T, flagKey string)
- }{
- {
- flipt.FlagType_VARIANT_FLAG_TYPE,
- func(t *testing.T, flagKey, segmentKey string) {
- variant, err := s.store.CreateVariant(context.TODO(), &flipt.CreateVariantRequest{
- NamespaceKey: s.namespace,
- FlagKey: flagKey,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, variant)
-
- rule, err := s.store.CreateRule(context.TODO(), &flipt.CreateRuleRequest{
- NamespaceKey: s.namespace,
- FlagKey: flagKey,
- SegmentKey: segmentKey,
- Rank: 1,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, rule)
- },
- func(t *testing.T, flagKey string) {
- rules, err := s.store.ListRules(context.TODO(), storage.ListWithOptions(storage.NewResource(s.namespace, flagKey)))
-
- require.NoError(t, err)
- assert.Len(t, rules.Results, 1)
-
- // delete the rule, then try to delete the segment again
- err = s.store.DeleteRule(context.TODO(), &flipt.DeleteRuleRequest{
- Id: rules.Results[0].Id,
- NamespaceKey: s.namespace,
- FlagKey: flagKey,
- })
- require.NoError(t, err)
- },
- },
- {
- flipt.FlagType_BOOLEAN_FLAG_TYPE,
- func(t *testing.T, flagKey, segmentKey string) {
- _, err := s.store.CreateRollout(context.Background(), &flipt.CreateRolloutRequest{
- NamespaceKey: s.namespace,
- FlagKey: flagKey,
- Rank: 1,
- Rule: &flipt.CreateRolloutRequest_Segment{
- Segment: &flipt.RolloutSegment{
- Value: true,
- SegmentKey: segmentKey,
- SegmentOperator: flipt.SegmentOperator_AND_SEGMENT_OPERATOR,
- },
- },
- })
- require.NoError(t, err)
- },
- func(t *testing.T, flagKey string) {
- rollouts, err := s.store.ListRollouts(context.Background(), storage.ListWithOptions(storage.NewResource(s.namespace, flagKey)))
- require.NoError(t, err)
- assert.Len(t, rollouts.Results, 1)
- err = s.store.DeleteRollout(context.Background(), &flipt.DeleteRolloutRequest{
- Id: rollouts.Results[0].Id,
- NamespaceKey: s.namespace,
- FlagKey: flagKey,
- })
- require.NoError(t, err)
- },
- },
- }
-
- for _, tt := range tests {
- t.Run(tt.Type.String(), func(t *testing.T) {
- flag, err := s.store.CreateFlag(context.TODO(), &flipt.CreateFlagRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- Type: tt.Type,
- Enabled: true,
- })
-
- require.NoError(t, err)
- assert.NotNil(t, flag)
-
- segment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, segment)
-
- tt.add(t, flag.Key, segment.Key)
-
- // try to delete segment with attached rule
- err = s.store.DeleteSegment(context.TODO(), &flipt.DeleteSegmentRequest{
- NamespaceKey: s.namespace,
- Key: segment.Key,
- })
-
- require.EqualError(t, err, fmt.Sprintf("segment \"%s/%s\" is in use", s.namespace, t.Name()))
-
- tt.remove(t, flag.Key)
-
- err = s.store.DeleteSegment(context.TODO(), &flipt.DeleteSegmentRequest{
- NamespaceKey: s.namespace,
- Key: segment.Key,
- })
-
- require.NoError(t, err)
- })
- }
-}
-
-func (s *DBTestSuite) TestDeleteSegment_NotFound() {
- t := s.T()
-
- err := s.store.DeleteSegment(context.TODO(), &flipt.DeleteSegmentRequest{Key: "foo"})
- require.NoError(t, err)
-}
-
-func (s *DBTestSuite) TestDeleteSegmentNamespace_NotFound() {
- t := s.T()
-
- err := s.store.DeleteSegment(context.TODO(), &flipt.DeleteSegmentRequest{
- NamespaceKey: s.namespace,
- Key: "foo",
- })
- require.NoError(t, err)
-}
-
-func (s *DBTestSuite) TestCreateConstraint() {
- t := s.T()
-
- segment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, segment)
-
- constraint, err := s.store.CreateConstraint(context.TODO(), &flipt.CreateConstraintRequest{
- SegmentKey: segment.Key,
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "foo",
- Operator: "EQ",
- Value: "bar",
- Description: "desc",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, constraint)
-
- assert.NotZero(t, constraint.Id)
- assert.Equal(t, storage.DefaultNamespace, constraint.NamespaceKey)
- assert.Equal(t, segment.Key, constraint.SegmentKey)
- assert.Equal(t, flipt.ComparisonType_STRING_COMPARISON_TYPE, constraint.Type)
- assert.Equal(t, "foo", constraint.Property)
- assert.Equal(t, flipt.OpEQ, constraint.Operator)
- assert.Equal(t, "bar", constraint.Value)
- assert.NotZero(t, constraint.CreatedAt)
- assert.Equal(t, constraint.CreatedAt.Seconds, constraint.UpdatedAt.Seconds)
- assert.Equal(t, "desc", constraint.Description)
-
- // get the segment again
- segment, err = s.store.GetSegment(context.TODO(), storage.NewResource(storage.DefaultNamespace, segment.Key))
-
- require.NoError(t, err)
- assert.NotNil(t, segment)
-
- assert.Len(t, segment.Constraints, 1)
-}
-
-func (s *DBTestSuite) TestCreateConstraintNamespace() {
- t := s.T()
-
- segment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, segment)
-
- constraint, err := s.store.CreateConstraint(context.TODO(), &flipt.CreateConstraintRequest{
- NamespaceKey: s.namespace,
- SegmentKey: segment.Key,
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "foo",
- Operator: "EQ",
- Value: "bar",
- Description: "desc",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, constraint)
-
- assert.NotZero(t, constraint.Id)
- assert.Equal(t, s.namespace, constraint.NamespaceKey)
- assert.Equal(t, segment.Key, constraint.SegmentKey)
- assert.Equal(t, flipt.ComparisonType_STRING_COMPARISON_TYPE, constraint.Type)
- assert.Equal(t, "foo", constraint.Property)
- assert.Equal(t, flipt.OpEQ, constraint.Operator)
- assert.Equal(t, "bar", constraint.Value)
- assert.NotZero(t, constraint.CreatedAt)
- assert.Equal(t, constraint.CreatedAt.Seconds, constraint.UpdatedAt.Seconds)
- assert.Equal(t, "desc", constraint.Description)
-
- // get the segment again
- segment, err = s.store.GetSegment(context.TODO(), storage.NewResource(s.namespace, segment.Key))
-
- require.NoError(t, err)
- assert.NotNil(t, segment)
-
- assert.Len(t, segment.Constraints, 1)
-}
-
-func (s *DBTestSuite) TestCreateConstraint_SegmentNotFound() {
- t := s.T()
-
- _, err := s.store.CreateConstraint(context.TODO(), &flipt.CreateConstraintRequest{
- SegmentKey: "foo",
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "foo",
- Operator: "NEQ",
- Value: "baz",
- })
-
- assert.EqualError(t, err, "segment \"default/foo\" not found")
-}
-
-func (s *DBTestSuite) TestCreateConstraintNamespace_SegmentNotFound() {
- t := s.T()
-
- _, err := s.store.CreateConstraint(context.TODO(), &flipt.CreateConstraintRequest{
- NamespaceKey: s.namespace,
- SegmentKey: "foo",
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "foo",
- Operator: "NEQ",
- Value: "baz",
- })
-
- assert.EqualError(t, err, fmt.Sprintf("segment \"%s/foo\" not found", s.namespace))
-}
-
-// see: https://github.com/flipt-io/flipt/pull/1721/
-func (s *DBTestSuite) TestGetSegmentWithConstraintMultiNamespace() {
- t := s.T()
-
- for _, namespace := range []string{storage.DefaultNamespace, s.namespace} {
- segment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- NamespaceKey: namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, segment)
-
- constraint, err := s.store.CreateConstraint(context.TODO(), &flipt.CreateConstraintRequest{
- NamespaceKey: namespace,
- SegmentKey: segment.Key,
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "foo",
- Operator: "EQ",
- Value: "bar",
- Description: "desc",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, constraint)
-
- assert.NotZero(t, constraint.Id)
- assert.Equal(t, namespace, constraint.NamespaceKey)
- assert.Equal(t, segment.Key, constraint.SegmentKey)
- assert.Equal(t, flipt.ComparisonType_STRING_COMPARISON_TYPE, constraint.Type)
- assert.Equal(t, "foo", constraint.Property)
- assert.Equal(t, flipt.OpEQ, constraint.Operator)
- assert.Equal(t, "bar", constraint.Value)
- assert.NotZero(t, constraint.CreatedAt)
- assert.Equal(t, constraint.CreatedAt.Seconds, constraint.UpdatedAt.Seconds)
- assert.Equal(t, "desc", constraint.Description)
- }
-
- // get the default namespaced segment
- segment, err := s.store.GetSegment(context.TODO(), storage.NewResource(storage.DefaultNamespace, t.Name()))
-
- require.NoError(t, err)
- assert.NotNil(t, segment)
-
- // ensure we aren't crossing namespaces
- assert.Len(t, segment.Constraints, 1)
-
- constraint := segment.Constraints[0]
- assert.NotZero(t, constraint.Id)
- assert.Equal(t, storage.DefaultNamespace, constraint.NamespaceKey)
- assert.Equal(t, segment.Key, constraint.SegmentKey)
- assert.Equal(t, flipt.ComparisonType_STRING_COMPARISON_TYPE, constraint.Type)
- assert.Equal(t, "foo", constraint.Property)
- assert.Equal(t, flipt.OpEQ, constraint.Operator)
- assert.Equal(t, "bar", constraint.Value)
- assert.NotZero(t, constraint.CreatedAt)
- assert.Equal(t, constraint.CreatedAt.Seconds, constraint.UpdatedAt.Seconds)
- assert.Equal(t, "desc", constraint.Description)
-}
-
-func (s *DBTestSuite) TestUpdateConstraint() {
- t := s.T()
-
- segment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, segment)
-
- constraint, err := s.store.CreateConstraint(context.TODO(), &flipt.CreateConstraintRequest{
- SegmentKey: segment.Key,
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "foo",
- Operator: "EQ",
- Value: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, constraint)
-
- assert.NotZero(t, constraint.Id)
- assert.Equal(t, storage.DefaultNamespace, constraint.NamespaceKey)
- assert.Equal(t, segment.Key, constraint.SegmentKey)
- assert.Equal(t, flipt.ComparisonType_STRING_COMPARISON_TYPE, constraint.Type)
- assert.Equal(t, "foo", constraint.Property)
- assert.Equal(t, flipt.OpEQ, constraint.Operator)
- assert.Equal(t, "bar", constraint.Value)
- assert.NotZero(t, constraint.CreatedAt)
- assert.Equal(t, constraint.CreatedAt.Seconds, constraint.UpdatedAt.Seconds)
-
- updated, err := s.store.UpdateConstraint(context.TODO(), &flipt.UpdateConstraintRequest{
- Id: constraint.Id,
- SegmentKey: constraint.SegmentKey,
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "foo",
- Operator: "EMPTY",
- Value: "bar",
- Description: "desc",
- })
-
- require.NoError(t, err)
-
- assert.Equal(t, constraint.Id, updated.Id)
- assert.Equal(t, storage.DefaultNamespace, updated.NamespaceKey)
- assert.Equal(t, constraint.SegmentKey, updated.SegmentKey)
- assert.Equal(t, constraint.Type, updated.Type)
- assert.Equal(t, constraint.Property, updated.Property)
- assert.Equal(t, flipt.OpEmpty, updated.Operator)
- assert.Empty(t, updated.Value)
- assert.NotZero(t, updated.CreatedAt)
- assert.NotZero(t, updated.UpdatedAt)
- assert.Equal(t, "desc", updated.Description)
-
- // get the segment again
- segment, err = s.store.GetSegment(context.TODO(), storage.NewResource(storage.DefaultNamespace, segment.Key))
-
- require.NoError(t, err)
- assert.NotNil(t, segment)
-
- assert.Len(t, segment.Constraints, 1)
-}
-
-func (s *DBTestSuite) TestUpdateConstraintNamespace() {
- t := s.T()
-
- segment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, segment)
-
- constraint, err := s.store.CreateConstraint(context.TODO(), &flipt.CreateConstraintRequest{
- NamespaceKey: s.namespace,
- SegmentKey: segment.Key,
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "foo",
- Operator: "EQ",
- Value: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, constraint)
-
- assert.NotZero(t, constraint.Id)
- assert.Equal(t, s.namespace, constraint.NamespaceKey)
- assert.Equal(t, segment.Key, constraint.SegmentKey)
- assert.Equal(t, flipt.ComparisonType_STRING_COMPARISON_TYPE, constraint.Type)
- assert.Equal(t, "foo", constraint.Property)
- assert.Equal(t, flipt.OpEQ, constraint.Operator)
- assert.Equal(t, "bar", constraint.Value)
- assert.NotZero(t, constraint.CreatedAt)
- assert.Equal(t, constraint.CreatedAt.Seconds, constraint.UpdatedAt.Seconds)
-
- updated, err := s.store.UpdateConstraint(context.TODO(), &flipt.UpdateConstraintRequest{
- Id: constraint.Id,
- NamespaceKey: s.namespace,
- SegmentKey: constraint.SegmentKey,
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "foo",
- Operator: "EMPTY",
- Value: "bar",
- Description: "desc",
- })
-
- require.NoError(t, err)
-
- assert.Equal(t, constraint.Id, updated.Id)
- assert.Equal(t, s.namespace, updated.NamespaceKey)
- assert.Equal(t, constraint.SegmentKey, updated.SegmentKey)
- assert.Equal(t, constraint.Type, updated.Type)
- assert.Equal(t, constraint.Property, updated.Property)
- assert.Equal(t, flipt.OpEmpty, updated.Operator)
- assert.Empty(t, updated.Value)
- assert.NotZero(t, updated.CreatedAt)
- assert.NotZero(t, updated.UpdatedAt)
- assert.Equal(t, "desc", updated.Description)
-
- // get the segment again
- segment, err = s.store.GetSegment(context.TODO(), storage.NewResource(s.namespace, segment.Key))
-
- require.NoError(t, err)
- assert.NotNil(t, segment)
-
- assert.Len(t, segment.Constraints, 1)
-}
-
-func (s *DBTestSuite) TestUpdateConstraint_NotFound() {
- t := s.T()
-
- segment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, segment)
-
- _, err = s.store.UpdateConstraint(context.TODO(), &flipt.UpdateConstraintRequest{
- Id: "foo",
- SegmentKey: segment.Key,
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "foo",
- Operator: "NEQ",
- Value: "baz",
- })
-
- assert.EqualError(t, err, "constraint \"foo\" not found")
-}
-
-func (s *DBTestSuite) TestUpdateConstraintNamespace_NotFound() {
- t := s.T()
-
- segment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, segment)
-
- _, err = s.store.UpdateConstraint(context.TODO(), &flipt.UpdateConstraintRequest{
- Id: "foo",
- NamespaceKey: s.namespace,
- SegmentKey: segment.Key,
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "foo",
- Operator: "NEQ",
- Value: "baz",
- })
-
- assert.EqualError(t, err, "constraint \"foo\" not found")
-}
-
-func (s *DBTestSuite) TestDeleteConstraint() {
- t := s.T()
-
- segment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, segment)
-
- constraint, err := s.store.CreateConstraint(context.TODO(), &flipt.CreateConstraintRequest{
- SegmentKey: segment.Key,
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "foo",
- Operator: "EQ",
- Value: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, constraint)
-
- err = s.store.DeleteConstraint(context.TODO(), &flipt.DeleteConstraintRequest{SegmentKey: constraint.SegmentKey, Id: constraint.Id})
- require.NoError(t, err)
-
- // get the segment again
- segment, err = s.store.GetSegment(context.TODO(), storage.NewResource(storage.DefaultNamespace, segment.Key))
-
- require.NoError(t, err)
- assert.NotNil(t, segment)
-
- assert.Empty(t, segment.Constraints)
-}
-
-func (s *DBTestSuite) TestDeleteConstraintNamespace() {
- t := s.T()
-
- segment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, segment)
-
- constraint, err := s.store.CreateConstraint(context.TODO(), &flipt.CreateConstraintRequest{
- NamespaceKey: s.namespace,
- SegmentKey: segment.Key,
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: "foo",
- Operator: "EQ",
- Value: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, constraint)
-
- err = s.store.DeleteConstraint(context.TODO(), &flipt.DeleteConstraintRequest{
- NamespaceKey: s.namespace,
- SegmentKey: constraint.SegmentKey,
- Id: constraint.Id,
- })
- require.NoError(t, err)
-
- // get the segment again
- segment, err = s.store.GetSegment(context.TODO(), storage.NewResource(s.namespace, segment.Key))
-
- require.NoError(t, err)
- assert.NotNil(t, segment)
-
- assert.Empty(t, segment.Constraints)
-}
-
-func (s *DBTestSuite) TestDeleteConstraint_NotFound() {
- t := s.T()
-
- segment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, segment)
-
- err = s.store.DeleteConstraint(context.TODO(), &flipt.DeleteConstraintRequest{
- Id: "foo",
- SegmentKey: segment.Key,
- })
-
- require.NoError(t, err)
-}
-
-func (s *DBTestSuite) TestDeleteConstraintNamespace_NotFound() {
- t := s.T()
-
- segment, err := s.store.CreateSegment(context.TODO(), &flipt.CreateSegmentRequest{
- NamespaceKey: s.namespace,
- Key: t.Name(),
- Name: "foo",
- Description: "bar",
- })
-
- require.NoError(t, err)
- assert.NotNil(t, segment)
-
- err = s.store.DeleteConstraint(context.TODO(), &flipt.DeleteConstraintRequest{
- Id: "foo",
- NamespaceKey: s.namespace,
- SegmentKey: segment.Key,
- })
-
- require.NoError(t, err)
-}
-
-func BenchmarkListSegments(b *testing.B) {
- s := new(DBTestSuite)
- t := &testing.T{}
- s.SetT(t)
- s.SetupSuite()
-
- for i := 0; i < 1000; i++ {
- reqs := []*flipt.CreateSegmentRequest{
- {
- Key: uuid.NewString(),
- Name: fmt.Sprintf("foo_%d", i),
- },
- }
-
- for _, req := range reqs {
- ss, err := s.store.CreateSegment(context.TODO(), req)
- require.NoError(t, err)
- assert.NotNil(t, ss)
-
- for j := 0; j < 10; j++ {
- v, err := s.store.CreateConstraint(context.TODO(), &flipt.CreateConstraintRequest{
- SegmentKey: ss.Key,
- Type: flipt.ComparisonType_STRING_COMPARISON_TYPE,
- Property: fmt.Sprintf("foo_%d", j),
- Operator: "EQ",
- Value: fmt.Sprintf("bar_%d", j),
- })
-
- require.NoError(t, err)
- assert.NotNil(t, v)
- }
- }
- }
-
- b.ResetTimer()
-
- req := storage.ListWithOptions(storage.NewNamespace(storage.DefaultNamespace))
- b.Run("no-pagination", func(b *testing.B) {
- req := req
- for i := 0; i < b.N; i++ {
- segments, err := s.store.ListSegments(context.TODO(), req)
- require.NoError(t, err)
- assert.NotEmpty(t, segments)
- }
- })
-
- for _, pageSize := range []uint64{10, 25, 100, 500} {
- req := req
- req.QueryParams.Limit = pageSize
- b.Run(fmt.Sprintf("pagination-limit-%d", pageSize), func(b *testing.B) {
- for i := 0; i < b.N; i++ {
- segments, err := s.store.ListSegments(context.TODO(), req)
- require.NoError(t, err)
- assert.NotEmpty(t, segments)
- }
- })
- }
-
- b.Run("pagination", func(b *testing.B) {
- req := req
- req.QueryParams.Limit = 500
- req.QueryParams.Offset = 50
- req.QueryParams.Order = storage.OrderDesc
- for i := 0; i < b.N; i++ {
- segments, err := s.store.ListSegments(context.TODO(), req)
- require.NoError(t, err)
- assert.NotEmpty(t, segments)
- }
- })
-
- s.TearDownSuite()
-}
diff --git a/internal/storage/sql/sqlite/sqlite.go b/internal/storage/sql/sqlite/sqlite.go
deleted file mode 100644
index 47e6553a76..0000000000
--- a/internal/storage/sql/sqlite/sqlite.go
+++ /dev/null
@@ -1,233 +0,0 @@
-package sqlite
-
-import (
- "context"
- "database/sql"
- "errors"
-
- sq "github.com/Masterminds/squirrel"
- "github.com/mattn/go-sqlite3"
- errs "go.flipt.io/flipt/errors"
- "go.flipt.io/flipt/internal/storage"
- "go.flipt.io/flipt/internal/storage/sql/common"
- flipt "go.flipt.io/flipt/rpc/flipt"
- "go.uber.org/zap"
-)
-
-var _ storage.Store = &Store{}
-
-// NewStore creates a new sqlite.Store
-func NewStore(db *sql.DB, builder sq.StatementBuilderType, logger *zap.Logger) *Store {
- return &Store{
- Store: common.NewStore(db, builder, logger),
- }
-}
-
-// Store is a sqlite specific implementation of storage.Store
-type Store struct {
- *common.Store
-}
-
-func (s *Store) String() string {
- return "sqlite"
-}
-
-func (s *Store) CreateNamespace(ctx context.Context, r *flipt.CreateNamespaceRequest) (*flipt.Namespace, error) {
- namespace, err := s.Store.CreateNamespace(ctx, r)
- if err != nil {
- var serr sqlite3.Error
-
- if errors.As(err, &serr) && serr.ExtendedCode == sqlite3.ErrConstraintPrimaryKey {
- return nil, errs.ErrInvalidf(`namespace "%s" is not unique`, r.Key)
- }
-
- return nil, err
- }
-
- return namespace, nil
-}
-
-func (s *Store) CreateFlag(ctx context.Context, r *flipt.CreateFlagRequest) (*flipt.Flag, error) {
- flag, err := s.Store.CreateFlag(ctx, r)
- if err != nil {
- var serr sqlite3.Error
-
- if errors.As(err, &serr) {
- switch serr.ExtendedCode {
- case sqlite3.ErrConstraintForeignKey:
- return nil, errs.ErrNotFoundf("namespace %q", r.NamespaceKey)
- case sqlite3.ErrConstraintPrimaryKey:
- return nil, errs.ErrInvalidf(`flag "%s/%s" is not unique`, r.NamespaceKey, r.Key)
- }
- }
-
- return nil, err
- }
-
- return flag, nil
-}
-
-func (s *Store) UpdateFlag(ctx context.Context, r *flipt.UpdateFlagRequest) (*flipt.Flag, error) {
- flag, err := s.Store.UpdateFlag(ctx, r)
- if err != nil {
- var serr sqlite3.Error
-
- if errors.As(err, &serr) && serr.ExtendedCode == sqlite3.ErrConstraintForeignKey {
- if r.DefaultVariantId != "" {
- return nil, errs.ErrInvalidf(`variant %q not found for flag "%s/%s"`, r.DefaultVariantId, r.NamespaceKey, r.Key)
- }
-
- return nil, errs.ErrInvalidf(`flag "%s/%s" not found`, r.NamespaceKey, r.Key)
- }
-
- return nil, err
- }
-
- return flag, nil
-}
-
-func (s *Store) CreateVariant(ctx context.Context, r *flipt.CreateVariantRequest) (*flipt.Variant, error) {
- variant, err := s.Store.CreateVariant(ctx, r)
- if err != nil {
- var serr sqlite3.Error
-
- if errors.As(err, &serr) {
- switch serr.ExtendedCode {
- case sqlite3.ErrConstraintForeignKey:
- return nil, errs.ErrNotFoundf(`flag "%s/%s"`, r.NamespaceKey, r.FlagKey)
- case sqlite3.ErrConstraintUnique:
- return nil, errs.ErrInvalidf(`variant %q is not unique for flag "%s/%s"`, r.Key, r.NamespaceKey, r.FlagKey)
- }
- }
-
- return nil, err
- }
-
- return variant, nil
-}
-
-func (s *Store) UpdateVariant(ctx context.Context, r *flipt.UpdateVariantRequest) (*flipt.Variant, error) {
- variant, err := s.Store.UpdateVariant(ctx, r)
- if err != nil {
- var serr sqlite3.Error
-
- if errors.As(err, &serr) && serr.Code == sqlite3.ErrConstraint {
- return nil, errs.ErrInvalidf(`variant %q is not unique for flag "%s/%s"`, r.Key, r.NamespaceKey, r.FlagKey)
- }
-
- return nil, err
- }
-
- return variant, nil
-}
-
-func (s *Store) CreateSegment(ctx context.Context, r *flipt.CreateSegmentRequest) (*flipt.Segment, error) {
- segment, err := s.Store.CreateSegment(ctx, r)
- if err != nil {
- var serr sqlite3.Error
-
- if errors.As(err, &serr) {
- switch serr.ExtendedCode {
- case sqlite3.ErrConstraintForeignKey:
- return nil, errs.ErrNotFoundf("namespace %q", r.NamespaceKey)
- case sqlite3.ErrConstraintPrimaryKey:
- return nil, errs.ErrInvalidf(`segment "%s/%s" is not unique`, r.NamespaceKey, r.Key)
- }
- }
-
- return nil, err
- }
-
- return segment, nil
-}
-
-func (s *Store) CreateConstraint(ctx context.Context, r *flipt.CreateConstraintRequest) (*flipt.Constraint, error) {
- constraint, err := s.Store.CreateConstraint(ctx, r)
- if err != nil {
- var serr sqlite3.Error
-
- if errors.As(err, &serr) && serr.Code == sqlite3.ErrConstraint {
- return nil, errs.ErrNotFoundf(`segment "%s/%s"`, r.NamespaceKey, r.SegmentKey)
- }
-
- return nil, err
- }
-
- return constraint, nil
-}
-
-func (s *Store) CreateRollout(ctx context.Context, r *flipt.CreateRolloutRequest) (*flipt.Rollout, error) {
- rollout, err := s.Store.CreateRollout(ctx, r)
- if err != nil {
- var serr sqlite3.Error
-
- if errors.As(err, &serr) && serr.Code == sqlite3.ErrConstraint {
- if segment := r.GetSegment(); segment != nil {
- return nil, errs.ErrNotFoundf(`flag "%s/%s or segment %s"`, r.NamespaceKey, r.FlagKey, segment.SegmentKey)
- }
- return nil, errs.ErrNotFoundf(`flag "%s/%s"`, r.NamespaceKey, r.FlagKey)
- }
-
- return nil, err
- }
-
- return rollout, nil
-}
-
-func (s *Store) CreateRule(ctx context.Context, r *flipt.CreateRuleRequest) (*flipt.Rule, error) {
- rule, err := s.Store.CreateRule(ctx, r)
- if err != nil {
- var serr sqlite3.Error
-
- if errors.As(err, &serr) && serr.Code == sqlite3.ErrConstraint {
- return nil, errs.ErrNotFoundf(`flag "%s/%s" or segment "%s/%s"`, r.NamespaceKey, r.FlagKey, r.NamespaceKey, r.SegmentKey)
- }
-
- return nil, err
- }
-
- return rule, nil
-}
-
-func (s *Store) UpdateRule(ctx context.Context, r *flipt.UpdateRuleRequest) (*flipt.Rule, error) {
- rule, err := s.Store.UpdateRule(ctx, r)
- if err != nil {
- var serr sqlite3.Error
-
- if errors.As(err, &serr) && serr.Code == sqlite3.ErrConstraint {
- return nil, errs.ErrNotFoundf(`rule "%s/%s"`, r.NamespaceKey, r.Id)
- }
-
- return nil, err
- }
-
- return rule, nil
-}
-
-func (s *Store) CreateDistribution(ctx context.Context, r *flipt.CreateDistributionRequest) (*flipt.Distribution, error) {
- dist, err := s.Store.CreateDistribution(ctx, r)
- if err != nil {
- var serr sqlite3.Error
-
- if errors.As(err, &serr) && serr.Code == sqlite3.ErrConstraint {
- return nil, errs.ErrNotFoundf("variant %q, rule %q, flag %q in namespace %q", r.VariantId, r.RuleId, r.FlagKey, r.NamespaceKey)
- }
-
- return nil, err
- }
-
- return dist, nil
-}
-
-func (s *Store) DeleteSegment(ctx context.Context, r *flipt.DeleteSegmentRequest) error {
- err := s.Store.DeleteSegment(ctx, r)
- if err != nil {
- var serr sqlite3.Error
-
- if errors.As(err, &serr) && serr.Code == sqlite3.ErrConstraint {
- return errs.ErrInvalidf(`segment "%s/%s" is in use`, r.NamespaceKey, r.Key)
- }
- }
-
- return err
-}
diff --git a/internal/storage/sql/testdata/benchmark_test.yml b/internal/storage/sql/testdata/benchmark_test.yml
deleted file mode 100644
index f918fd8b1d..0000000000
--- a/internal/storage/sql/testdata/benchmark_test.yml
+++ /dev/null
@@ -1,16260 +0,0 @@
-version: "1.1"
-flags:
-- key: flag_001
- name: FLAG_001
- type: VARIANT_FLAG_TYPE
- description: Some Description
- enabled: true
- variants:
- - key: variant_001
- name: VARIANT_001
- - key: variant_002
- name: VARIANT_002
- rules:
- - segment: segment_001
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_002
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_003
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_004
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_005
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_006
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_007
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_008
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_009
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_010
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_011
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_012
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_013
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_014
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_015
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_016
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_017
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_018
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_019
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_020
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_021
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_022
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_023
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_024
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_025
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_026
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_027
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_028
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_029
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_030
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_031
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_032
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_033
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_034
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_035
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_036
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_037
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_038
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_039
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_040
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_041
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_042
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_043
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_044
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_045
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_046
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_047
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_048
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_049
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_050
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
-- key: flag_002
- name: FLAG_002
- type: VARIANT_FLAG_TYPE
- description: Some Description
- enabled: true
- variants:
- - key: variant_001
- name: VARIANT_001
- - key: variant_002
- name: VARIANT_002
- rules:
- - segment: segment_001
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_002
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_003
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_004
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_005
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_006
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_007
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_008
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_009
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_010
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_011
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_012
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_013
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_014
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_015
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_016
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_017
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_018
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_019
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_020
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_021
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_022
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_023
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_024
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_025
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_026
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_027
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_028
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_029
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_030
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_031
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_032
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_033
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_034
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_035
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_036
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_037
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_038
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_039
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_040
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_041
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_042
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_043
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_044
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_045
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_046
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_047
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_048
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_049
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_050
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
-- key: flag_003
- name: FLAG_003
- type: VARIANT_FLAG_TYPE
- description: Some Description
- enabled: true
- variants:
- - key: variant_001
- name: VARIANT_001
- - key: variant_002
- name: VARIANT_002
- rules:
- - segment: segment_001
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_002
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_003
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_004
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_005
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_006
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_007
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_008
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_009
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_010
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_011
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_012
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_013
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_014
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_015
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_016
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_017
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_018
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_019
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_020
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_021
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_022
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_023
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_024
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_025
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_026
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_027
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_028
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_029
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_030
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_031
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_032
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_033
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_034
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_035
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_036
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_037
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_038
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_039
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_040
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_041
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_042
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_043
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_044
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_045
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_046
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_047
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_048
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_049
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_050
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
-- key: flag_004
- name: FLAG_004
- type: VARIANT_FLAG_TYPE
- description: Some Description
- enabled: true
- variants:
- - key: variant_001
- name: VARIANT_001
- - key: variant_002
- name: VARIANT_002
- rules:
- - segment: segment_001
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_002
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_003
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_004
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_005
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_006
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_007
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_008
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_009
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_010
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_011
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_012
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_013
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_014
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_015
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_016
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_017
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_018
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_019
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_020
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_021
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_022
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_023
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_024
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_025
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_026
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_027
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_028
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_029
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_030
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_031
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_032
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_033
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_034
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_035
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_036
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_037
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_038
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_039
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_040
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_041
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_042
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_043
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_044
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_045
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_046
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_047
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_048
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_049
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_050
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
-- key: flag_005
- name: FLAG_005
- type: VARIANT_FLAG_TYPE
- description: Some Description
- enabled: true
- variants:
- - key: variant_001
- name: VARIANT_001
- - key: variant_002
- name: VARIANT_002
- rules:
- - segment: segment_001
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_002
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_003
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_004
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_005
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_006
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_007
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_008
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_009
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_010
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_011
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_012
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_013
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_014
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_015
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_016
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_017
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_018
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_019
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_020
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_021
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_022
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_023
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_024
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_025
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_026
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_027
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_028
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_029
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_030
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_031
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_032
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_033
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_034
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_035
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_036
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_037
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_038
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_039
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_040
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_041
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_042
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_043
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_044
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_045
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_046
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_047
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_048
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_049
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_050
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
-- key: flag_006
- name: FLAG_006
- type: VARIANT_FLAG_TYPE
- description: Some Description
- enabled: true
- variants:
- - key: variant_001
- name: VARIANT_001
- - key: variant_002
- name: VARIANT_002
- rules:
- - segment: segment_001
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_002
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_003
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_004
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_005
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_006
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_007
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_008
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_009
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_010
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_011
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_012
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_013
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_014
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_015
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_016
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_017
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_018
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_019
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_020
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_021
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_022
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_023
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_024
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_025
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_026
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_027
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_028
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_029
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_030
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_031
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_032
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_033
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_034
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_035
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_036
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_037
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_038
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_039
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_040
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_041
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_042
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_043
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_044
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_045
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_046
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_047
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_048
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_049
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_050
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
-- key: flag_007
- name: FLAG_007
- type: VARIANT_FLAG_TYPE
- description: Some Description
- enabled: true
- variants:
- - key: variant_001
- name: VARIANT_001
- - key: variant_002
- name: VARIANT_002
- rules:
- - segment: segment_001
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_002
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_003
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_004
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_005
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_006
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_007
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_008
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_009
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_010
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_011
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_012
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_013
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_014
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_015
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_016
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_017
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_018
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_019
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_020
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_021
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_022
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_023
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_024
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_025
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_026
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_027
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_028
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_029
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_030
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_031
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_032
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_033
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_034
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_035
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_036
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_037
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_038
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_039
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_040
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_041
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_042
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_043
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_044
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_045
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_046
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_047
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_048
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_049
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_050
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
-- key: flag_008
- name: FLAG_008
- type: VARIANT_FLAG_TYPE
- description: Some Description
- enabled: true
- variants:
- - key: variant_001
- name: VARIANT_001
- - key: variant_002
- name: VARIANT_002
- rules:
- - segment: segment_001
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_002
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_003
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_004
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_005
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_006
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_007
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_008
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_009
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_010
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_011
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_012
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_013
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_014
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_015
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_016
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_017
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_018
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_019
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_020
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_021
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_022
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_023
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_024
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_025
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_026
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_027
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_028
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_029
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_030
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_031
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_032
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_033
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_034
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_035
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_036
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_037
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_038
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_039
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_040
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_041
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_042
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_043
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_044
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_045
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_046
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_047
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_048
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_049
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_050
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
-- key: flag_009
- name: FLAG_009
- type: VARIANT_FLAG_TYPE
- description: Some Description
- enabled: true
- variants:
- - key: variant_001
- name: VARIANT_001
- - key: variant_002
- name: VARIANT_002
- rules:
- - segment: segment_001
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_002
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_003
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_004
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_005
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_006
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_007
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_008
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_009
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_010
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_011
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_012
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_013
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_014
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_015
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_016
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_017
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_018
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_019
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_020
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_021
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_022
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_023
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_024
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_025
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_026
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_027
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_028
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_029
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_030
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_031
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_032
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_033
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_034
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_035
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_036
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_037
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_038
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_039
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_040
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_041
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_042
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_043
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_044
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_045
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_046
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_047
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_048
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_049
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_050
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
-- key: flag_010
- name: FLAG_010
- type: VARIANT_FLAG_TYPE
- description: Some Description
- enabled: true
- variants:
- - key: variant_001
- name: VARIANT_001
- - key: variant_002
- name: VARIANT_002
- rules:
- - segment: segment_001
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_002
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_003
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_004
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_005
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_006
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_007
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_008
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_009
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_010
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_011
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_012
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_013
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_014
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_015
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_016
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_017
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_018
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_019
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_020
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_021
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_022
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_023
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_024
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_025
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_026
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_027
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_028
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_029
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_030
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_031
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_032
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_033
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_034
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_035
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_036
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_037
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_038
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_039
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_040
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_041
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_042
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_043
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_044
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_045
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_046
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_047
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_048
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_049
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_050
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
-- key: flag_011
- name: FLAG_011
- type: VARIANT_FLAG_TYPE
- description: Some Description
- enabled: true
- variants:
- - key: variant_001
- name: VARIANT_001
- - key: variant_002
- name: VARIANT_002
- rules:
- - segment: segment_001
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_002
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_003
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_004
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_005
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_006
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_007
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_008
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_009
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_010
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_011
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_012
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_013
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_014
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_015
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_016
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_017
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_018
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_019
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_020
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_021
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_022
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_023
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_024
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_025
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_026
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_027
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_028
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_029
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_030
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_031
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_032
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_033
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_034
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_035
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_036
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_037
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_038
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_039
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_040
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_041
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_042
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_043
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_044
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_045
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_046
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_047
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_048
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_049
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_050
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
-- key: flag_012
- name: FLAG_012
- type: VARIANT_FLAG_TYPE
- description: Some Description
- enabled: true
- variants:
- - key: variant_001
- name: VARIANT_001
- - key: variant_002
- name: VARIANT_002
- rules:
- - segment: segment_001
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_002
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_003
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_004
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_005
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_006
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_007
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_008
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_009
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_010
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_011
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_012
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_013
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_014
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_015
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_016
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_017
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_018
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_019
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_020
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_021
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_022
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_023
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_024
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_025
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_026
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_027
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_028
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_029
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_030
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_031
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_032
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_033
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_034
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_035
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_036
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_037
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_038
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_039
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_040
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_041
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_042
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_043
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_044
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_045
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_046
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_047
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_048
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_049
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_050
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
-- key: flag_013
- name: FLAG_013
- type: VARIANT_FLAG_TYPE
- description: Some Description
- enabled: true
- variants:
- - key: variant_001
- name: VARIANT_001
- - key: variant_002
- name: VARIANT_002
- rules:
- - segment: segment_001
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_002
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_003
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_004
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_005
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_006
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_007
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_008
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_009
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_010
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_011
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_012
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_013
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_014
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_015
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_016
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_017
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_018
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_019
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_020
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_021
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_022
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_023
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_024
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_025
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_026
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_027
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_028
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_029
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_030
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_031
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_032
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_033
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_034
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_035
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_036
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_037
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_038
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_039
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_040
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_041
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_042
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_043
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_044
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_045
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_046
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_047
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_048
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_049
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_050
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
-- key: flag_014
- name: FLAG_014
- type: VARIANT_FLAG_TYPE
- description: Some Description
- enabled: true
- variants:
- - key: variant_001
- name: VARIANT_001
- - key: variant_002
- name: VARIANT_002
- rules:
- - segment: segment_001
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_002
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_003
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_004
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_005
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_006
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_007
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_008
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_009
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_010
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_011
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_012
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_013
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_014
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_015
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_016
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_017
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_018
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_019
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_020
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_021
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_022
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_023
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_024
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_025
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_026
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_027
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_028
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_029
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_030
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_031
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_032
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_033
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_034
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_035
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_036
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_037
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_038
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_039
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_040
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_041
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_042
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_043
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_044
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_045
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_046
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_047
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_048
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_049
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_050
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
-- key: flag_015
- name: FLAG_015
- type: VARIANT_FLAG_TYPE
- description: Some Description
- enabled: true
- variants:
- - key: variant_001
- name: VARIANT_001
- - key: variant_002
- name: VARIANT_002
- rules:
- - segment: segment_001
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_002
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_003
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_004
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_005
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_006
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_007
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_008
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_009
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_010
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_011
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_012
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_013
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_014
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_015
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_016
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_017
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_018
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_019
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_020
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_021
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_022
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_023
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_024
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_025
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_026
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_027
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_028
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_029
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_030
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_031
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_032
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_033
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_034
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_035
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_036
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_037
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_038
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_039
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_040
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_041
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_042
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_043
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_044
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_045
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_046
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_047
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_048
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_049
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_050
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
-- key: flag_016
- name: FLAG_016
- type: VARIANT_FLAG_TYPE
- description: Some Description
- enabled: true
- variants:
- - key: variant_001
- name: VARIANT_001
- - key: variant_002
- name: VARIANT_002
- rules:
- - segment: segment_001
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_002
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_003
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_004
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_005
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_006
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_007
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_008
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_009
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_010
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_011
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_012
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_013
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_014
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_015
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_016
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_017
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_018
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_019
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_020
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_021
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_022
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_023
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_024
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_025
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_026
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_027
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_028
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_029
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_030
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_031
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_032
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_033
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_034
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_035
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_036
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_037
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_038
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_039
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_040
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_041
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_042
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_043
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_044
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_045
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_046
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_047
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_048
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_049
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_050
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
-- key: flag_017
- name: FLAG_017
- type: VARIANT_FLAG_TYPE
- description: Some Description
- enabled: true
- variants:
- - key: variant_001
- name: VARIANT_001
- - key: variant_002
- name: VARIANT_002
- rules:
- - segment: segment_001
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_002
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_003
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_004
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_005
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_006
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_007
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_008
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_009
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_010
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_011
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_012
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_013
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_014
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_015
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_016
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_017
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_018
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_019
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_020
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_021
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_022
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_023
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_024
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_025
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_026
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_027
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_028
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_029
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_030
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_031
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_032
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_033
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_034
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_035
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_036
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_037
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_038
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_039
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_040
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_041
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_042
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_043
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_044
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_045
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_046
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_047
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_048
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_049
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_050
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
-- key: flag_018
- name: FLAG_018
- type: VARIANT_FLAG_TYPE
- description: Some Description
- enabled: true
- variants:
- - key: variant_001
- name: VARIANT_001
- - key: variant_002
- name: VARIANT_002
- rules:
- - segment: segment_001
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_002
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_003
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_004
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_005
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_006
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_007
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_008
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_009
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_010
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_011
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_012
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_013
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_014
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_015
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_016
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_017
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_018
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_019
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_020
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_021
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_022
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_023
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_024
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_025
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_026
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_027
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_028
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_029
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_030
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_031
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_032
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_033
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_034
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_035
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_036
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_037
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_038
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_039
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_040
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_041
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_042
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_043
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_044
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_045
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_046
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_047
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_048
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_049
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_050
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
-- key: flag_019
- name: FLAG_019
- type: VARIANT_FLAG_TYPE
- description: Some Description
- enabled: true
- variants:
- - key: variant_001
- name: VARIANT_001
- - key: variant_002
- name: VARIANT_002
- rules:
- - segment: segment_001
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_002
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_003
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_004
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_005
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_006
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_007
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_008
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_009
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_010
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_011
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_012
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_013
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_014
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_015
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_016
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_017
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_018
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_019
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_020
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_021
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_022
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_023
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_024
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_025
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_026
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_027
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_028
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_029
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_030
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_031
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_032
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_033
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_034
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_035
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_036
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_037
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_038
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_039
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_040
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_041
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_042
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_043
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_044
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_045
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_046
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_047
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_048
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_049
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_050
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
-- key: flag_020
- name: FLAG_020
- type: VARIANT_FLAG_TYPE
- description: Some Description
- enabled: true
- variants:
- - key: variant_001
- name: VARIANT_001
- - key: variant_002
- name: VARIANT_002
- rules:
- - segment: segment_001
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_002
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_003
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_004
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_005
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_006
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_007
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_008
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_009
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_010
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_011
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_012
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_013
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_014
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_015
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_016
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_017
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_018
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_019
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_020
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_021
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_022
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_023
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_024
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_025
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_026
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_027
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_028
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_029
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_030
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_031
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_032
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_033
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_034
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_035
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_036
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_037
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_038
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_039
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_040
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_041
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_042
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_043
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_044
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_045
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_046
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_047
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_048
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_049
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_050
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
-- key: flag_021
- name: FLAG_021
- type: VARIANT_FLAG_TYPE
- description: Some Description
- enabled: true
- variants:
- - key: variant_001
- name: VARIANT_001
- - key: variant_002
- name: VARIANT_002
- rules:
- - segment: segment_001
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_002
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_003
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_004
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_005
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_006
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_007
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_008
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_009
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_010
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_011
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_012
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_013
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_014
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_015
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_016
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_017
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_018
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_019
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_020
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_021
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_022
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_023
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_024
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_025
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_026
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_027
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_028
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_029
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_030
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_031
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_032
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_033
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_034
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_035
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_036
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_037
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_038
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_039
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_040
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_041
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_042
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_043
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_044
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_045
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_046
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_047
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_048
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_049
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_050
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
-- key: flag_022
- name: FLAG_022
- type: VARIANT_FLAG_TYPE
- description: Some Description
- enabled: true
- variants:
- - key: variant_001
- name: VARIANT_001
- - key: variant_002
- name: VARIANT_002
- rules:
- - segment: segment_001
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_002
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_003
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_004
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_005
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_006
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_007
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_008
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_009
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_010
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_011
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_012
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_013
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_014
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_015
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_016
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_017
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_018
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_019
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_020
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_021
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_022
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_023
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_024
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_025
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_026
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_027
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_028
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_029
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_030
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_031
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_032
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_033
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_034
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_035
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_036
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_037
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_038
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_039
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_040
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_041
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_042
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_043
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_044
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_045
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_046
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_047
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_048
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_049
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_050
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
-- key: flag_023
- name: FLAG_023
- type: VARIANT_FLAG_TYPE
- description: Some Description
- enabled: true
- variants:
- - key: variant_001
- name: VARIANT_001
- - key: variant_002
- name: VARIANT_002
- rules:
- - segment: segment_001
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_002
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_003
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_004
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_005
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_006
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_007
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_008
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_009
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_010
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_011
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_012
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_013
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_014
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_015
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_016
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_017
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_018
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_019
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_020
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_021
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_022
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_023
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_024
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_025
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_026
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_027
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_028
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_029
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_030
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_031
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_032
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_033
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_034
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_035
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_036
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_037
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_038
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_039
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_040
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_041
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_042
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_043
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_044
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_045
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_046
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_047
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_048
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_049
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_050
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
-- key: flag_024
- name: FLAG_024
- type: VARIANT_FLAG_TYPE
- description: Some Description
- enabled: true
- variants:
- - key: variant_001
- name: VARIANT_001
- - key: variant_002
- name: VARIANT_002
- rules:
- - segment: segment_001
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_002
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_003
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_004
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_005
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_006
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_007
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_008
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_009
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_010
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_011
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_012
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_013
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_014
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_015
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_016
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_017
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_018
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_019
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_020
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_021
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_022
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_023
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_024
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_025
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_026
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_027
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_028
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_029
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_030
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_031
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_032
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_033
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_034
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_035
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_036
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_037
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_038
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_039
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_040
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_041
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_042
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_043
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_044
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_045
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_046
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_047
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_048
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_049
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_050
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
-- key: flag_025
- name: FLAG_025
- type: VARIANT_FLAG_TYPE
- description: Some Description
- enabled: true
- variants:
- - key: variant_001
- name: VARIANT_001
- - key: variant_002
- name: VARIANT_002
- rules:
- - segment: segment_001
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_002
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_003
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_004
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_005
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_006
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_007
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_008
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_009
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_010
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_011
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_012
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_013
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_014
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_015
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_016
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_017
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_018
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_019
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_020
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_021
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_022
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_023
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_024
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_025
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_026
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_027
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_028
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_029
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_030
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_031
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_032
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_033
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_034
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_035
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_036
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_037
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_038
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_039
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_040
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_041
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_042
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_043
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_044
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_045
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_046
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_047
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_048
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_049
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_050
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
-- key: flag_026
- name: FLAG_026
- type: VARIANT_FLAG_TYPE
- description: Some Description
- enabled: true
- variants:
- - key: variant_001
- name: VARIANT_001
- - key: variant_002
- name: VARIANT_002
- rules:
- - segment: segment_001
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_002
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_003
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_004
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_005
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_006
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_007
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_008
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_009
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_010
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_011
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_012
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_013
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_014
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_015
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_016
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_017
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_018
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_019
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_020
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_021
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_022
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_023
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_024
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_025
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_026
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_027
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_028
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_029
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_030
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_031
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_032
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_033
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_034
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_035
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_036
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_037
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_038
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_039
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_040
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_041
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_042
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_043
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_044
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_045
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_046
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_047
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_048
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_049
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_050
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
-- key: flag_027
- name: FLAG_027
- type: VARIANT_FLAG_TYPE
- description: Some Description
- enabled: true
- variants:
- - key: variant_001
- name: VARIANT_001
- - key: variant_002
- name: VARIANT_002
- rules:
- - segment: segment_001
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_002
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_003
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_004
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_005
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_006
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_007
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_008
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_009
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_010
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_011
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_012
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_013
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_014
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_015
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_016
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_017
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_018
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_019
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_020
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_021
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_022
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_023
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_024
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_025
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_026
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_027
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_028
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_029
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_030
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_031
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_032
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_033
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_034
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_035
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_036
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_037
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_038
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_039
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_040
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_041
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_042
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_043
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_044
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_045
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_046
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_047
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_048
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_049
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_050
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
-- key: flag_028
- name: FLAG_028
- type: VARIANT_FLAG_TYPE
- description: Some Description
- enabled: true
- variants:
- - key: variant_001
- name: VARIANT_001
- - key: variant_002
- name: VARIANT_002
- rules:
- - segment: segment_001
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_002
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_003
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_004
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_005
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_006
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_007
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_008
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_009
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_010
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_011
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_012
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_013
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_014
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_015
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_016
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_017
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_018
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_019
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_020
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_021
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_022
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_023
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_024
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_025
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_026
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_027
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_028
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_029
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_030
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_031
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_032
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_033
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_034
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_035
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_036
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_037
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_038
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_039
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_040
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_041
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_042
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_043
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_044
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_045
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_046
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_047
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_048
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_049
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_050
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
-- key: flag_029
- name: FLAG_029
- type: VARIANT_FLAG_TYPE
- description: Some Description
- enabled: true
- variants:
- - key: variant_001
- name: VARIANT_001
- - key: variant_002
- name: VARIANT_002
- rules:
- - segment: segment_001
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_002
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_003
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_004
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_005
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_006
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_007
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_008
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_009
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_010
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_011
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_012
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_013
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_014
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_015
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_016
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_017
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_018
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_019
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_020
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_021
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_022
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_023
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_024
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_025
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_026
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_027
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_028
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_029
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_030
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_031
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_032
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_033
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_034
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_035
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_036
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_037
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_038
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_039
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_040
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_041
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_042
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_043
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_044
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_045
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_046
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_047
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_048
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_049
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_050
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
-- key: flag_030
- name: FLAG_030
- type: VARIANT_FLAG_TYPE
- description: Some Description
- enabled: true
- variants:
- - key: variant_001
- name: VARIANT_001
- - key: variant_002
- name: VARIANT_002
- rules:
- - segment: segment_001
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_002
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_003
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_004
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_005
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_006
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_007
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_008
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_009
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_010
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_011
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_012
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_013
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_014
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_015
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_016
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_017
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_018
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_019
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_020
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_021
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_022
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_023
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_024
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_025
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_026
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_027
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_028
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_029
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_030
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_031
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_032
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_033
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_034
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_035
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_036
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_037
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_038
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_039
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_040
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_041
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_042
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_043
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_044
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_045
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_046
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_047
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_048
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_049
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_050
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
-- key: flag_031
- name: FLAG_031
- type: VARIANT_FLAG_TYPE
- description: Some Description
- enabled: true
- variants:
- - key: variant_001
- name: VARIANT_001
- - key: variant_002
- name: VARIANT_002
- rules:
- - segment: segment_001
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_002
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_003
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_004
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_005
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_006
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_007
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_008
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_009
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_010
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_011
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_012
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_013
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_014
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_015
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_016
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_017
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_018
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_019
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_020
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_021
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_022
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_023
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_024
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_025
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_026
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_027
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_028
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_029
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_030
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_031
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_032
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_033
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_034
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_035
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_036
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_037
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_038
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_039
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_040
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_041
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_042
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_043
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_044
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_045
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_046
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_047
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_048
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_049
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_050
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
-- key: flag_032
- name: FLAG_032
- type: VARIANT_FLAG_TYPE
- description: Some Description
- enabled: true
- variants:
- - key: variant_001
- name: VARIANT_001
- - key: variant_002
- name: VARIANT_002
- rules:
- - segment: segment_001
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_002
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_003
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_004
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_005
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_006
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_007
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_008
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_009
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_010
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_011
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_012
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_013
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_014
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_015
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_016
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_017
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_018
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_019
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_020
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_021
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_022
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_023
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_024
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_025
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_026
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_027
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_028
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_029
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_030
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_031
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_032
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_033
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_034
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_035
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_036
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_037
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_038
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_039
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_040
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_041
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_042
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_043
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_044
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_045
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_046
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_047
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_048
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_049
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_050
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
-- key: flag_033
- name: FLAG_033
- type: VARIANT_FLAG_TYPE
- description: Some Description
- enabled: true
- variants:
- - key: variant_001
- name: VARIANT_001
- - key: variant_002
- name: VARIANT_002
- rules:
- - segment: segment_001
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_002
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_003
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_004
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_005
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_006
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_007
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_008
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_009
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_010
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_011
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_012
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_013
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_014
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_015
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_016
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_017
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_018
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_019
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_020
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_021
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_022
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_023
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_024
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_025
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_026
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_027
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_028
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_029
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_030
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_031
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_032
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_033
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_034
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_035
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_036
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_037
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_038
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_039
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_040
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_041
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_042
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_043
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_044
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_045
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_046
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_047
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_048
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_049
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_050
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
-- key: flag_034
- name: FLAG_034
- type: VARIANT_FLAG_TYPE
- description: Some Description
- enabled: true
- variants:
- - key: variant_001
- name: VARIANT_001
- - key: variant_002
- name: VARIANT_002
- rules:
- - segment: segment_001
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_002
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_003
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_004
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_005
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_006
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_007
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_008
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_009
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_010
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_011
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_012
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_013
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_014
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_015
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_016
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_017
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_018
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_019
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_020
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_021
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_022
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_023
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_024
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_025
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_026
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_027
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_028
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_029
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_030
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_031
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_032
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_033
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_034
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_035
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_036
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_037
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_038
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_039
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_040
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_041
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_042
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_043
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_044
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_045
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_046
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_047
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_048
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_049
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_050
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
-- key: flag_035
- name: FLAG_035
- type: VARIANT_FLAG_TYPE
- description: Some Description
- enabled: true
- variants:
- - key: variant_001
- name: VARIANT_001
- - key: variant_002
- name: VARIANT_002
- rules:
- - segment: segment_001
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_002
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_003
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_004
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_005
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_006
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_007
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_008
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_009
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_010
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_011
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_012
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_013
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_014
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_015
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_016
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_017
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_018
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_019
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_020
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_021
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_022
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_023
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_024
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_025
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_026
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_027
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_028
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_029
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_030
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_031
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_032
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_033
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_034
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_035
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_036
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_037
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_038
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_039
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_040
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_041
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_042
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_043
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_044
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_045
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_046
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_047
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_048
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_049
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_050
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
-- key: flag_036
- name: FLAG_036
- type: VARIANT_FLAG_TYPE
- description: Some Description
- enabled: true
- variants:
- - key: variant_001
- name: VARIANT_001
- - key: variant_002
- name: VARIANT_002
- rules:
- - segment: segment_001
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_002
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_003
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_004
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_005
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_006
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_007
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_008
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_009
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_010
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_011
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_012
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_013
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_014
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_015
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_016
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_017
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_018
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_019
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_020
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_021
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_022
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_023
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_024
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_025
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_026
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_027
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_028
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_029
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_030
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_031
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_032
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_033
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_034
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_035
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_036
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_037
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_038
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_039
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_040
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_041
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_042
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_043
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_044
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_045
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_046
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_047
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_048
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_049
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_050
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
-- key: flag_037
- name: FLAG_037
- type: VARIANT_FLAG_TYPE
- description: Some Description
- enabled: true
- variants:
- - key: variant_001
- name: VARIANT_001
- - key: variant_002
- name: VARIANT_002
- rules:
- - segment: segment_001
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_002
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_003
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_004
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_005
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_006
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_007
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_008
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_009
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_010
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_011
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_012
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_013
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_014
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_015
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_016
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_017
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_018
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_019
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_020
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_021
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_022
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_023
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_024
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_025
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_026
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_027
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_028
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_029
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_030
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_031
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_032
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_033
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_034
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_035
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_036
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_037
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_038
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_039
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_040
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_041
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_042
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_043
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_044
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_045
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_046
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_047
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_048
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_049
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_050
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
-- key: flag_038
- name: FLAG_038
- type: VARIANT_FLAG_TYPE
- description: Some Description
- enabled: true
- variants:
- - key: variant_001
- name: VARIANT_001
- - key: variant_002
- name: VARIANT_002
- rules:
- - segment: segment_001
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_002
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_003
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_004
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_005
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_006
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_007
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_008
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_009
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_010
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_011
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_012
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_013
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_014
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_015
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_016
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_017
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_018
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_019
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_020
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_021
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_022
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_023
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_024
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_025
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_026
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_027
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_028
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_029
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_030
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_031
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_032
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_033
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_034
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_035
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_036
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_037
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_038
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_039
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_040
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_041
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_042
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_043
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_044
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_045
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_046
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_047
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_048
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_049
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_050
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
-- key: flag_039
- name: FLAG_039
- type: VARIANT_FLAG_TYPE
- description: Some Description
- enabled: true
- variants:
- - key: variant_001
- name: VARIANT_001
- - key: variant_002
- name: VARIANT_002
- rules:
- - segment: segment_001
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_002
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_003
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_004
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_005
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_006
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_007
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_008
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_009
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_010
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_011
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_012
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_013
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_014
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_015
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_016
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_017
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_018
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_019
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_020
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_021
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_022
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_023
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_024
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_025
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_026
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_027
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_028
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_029
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_030
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_031
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_032
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_033
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_034
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_035
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_036
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_037
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_038
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_039
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_040
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_041
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_042
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_043
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_044
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_045
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_046
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_047
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_048
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_049
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_050
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
-- key: flag_040
- name: FLAG_040
- type: VARIANT_FLAG_TYPE
- description: Some Description
- enabled: true
- variants:
- - key: variant_001
- name: VARIANT_001
- - key: variant_002
- name: VARIANT_002
- rules:
- - segment: segment_001
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_002
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_003
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_004
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_005
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_006
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_007
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_008
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_009
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_010
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_011
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_012
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_013
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_014
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_015
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_016
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_017
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_018
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_019
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_020
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_021
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_022
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_023
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_024
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_025
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_026
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_027
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_028
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_029
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_030
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_031
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_032
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_033
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_034
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_035
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_036
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_037
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_038
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_039
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_040
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_041
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_042
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_043
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_044
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_045
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_046
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_047
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_048
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_049
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_050
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
-- key: flag_041
- name: FLAG_041
- type: VARIANT_FLAG_TYPE
- description: Some Description
- enabled: true
- variants:
- - key: variant_001
- name: VARIANT_001
- - key: variant_002
- name: VARIANT_002
- rules:
- - segment: segment_001
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_002
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_003
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_004
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_005
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_006
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_007
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_008
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_009
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_010
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_011
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_012
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_013
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_014
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_015
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_016
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_017
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_018
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_019
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_020
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_021
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_022
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_023
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_024
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_025
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_026
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_027
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_028
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_029
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_030
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_031
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_032
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_033
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_034
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_035
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_036
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_037
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_038
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_039
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_040
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_041
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_042
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_043
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_044
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_045
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_046
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_047
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_048
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_049
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_050
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
-- key: flag_042
- name: FLAG_042
- type: VARIANT_FLAG_TYPE
- description: Some Description
- enabled: true
- variants:
- - key: variant_001
- name: VARIANT_001
- - key: variant_002
- name: VARIANT_002
- rules:
- - segment: segment_001
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_002
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_003
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_004
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_005
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_006
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_007
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_008
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_009
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_010
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_011
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_012
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_013
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_014
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_015
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_016
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_017
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_018
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_019
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_020
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_021
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_022
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_023
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_024
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_025
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_026
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_027
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_028
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_029
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_030
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_031
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_032
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_033
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_034
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_035
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_036
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_037
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_038
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_039
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_040
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_041
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_042
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_043
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_044
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_045
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_046
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_047
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_048
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_049
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_050
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
-- key: flag_043
- name: FLAG_043
- type: VARIANT_FLAG_TYPE
- description: Some Description
- enabled: true
- variants:
- - key: variant_001
- name: VARIANT_001
- - key: variant_002
- name: VARIANT_002
- rules:
- - segment: segment_001
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_002
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_003
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_004
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_005
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_006
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_007
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_008
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_009
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_010
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_011
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_012
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_013
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_014
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_015
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_016
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_017
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_018
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_019
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_020
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_021
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_022
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_023
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_024
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_025
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_026
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_027
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_028
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_029
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_030
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_031
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_032
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_033
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_034
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_035
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_036
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_037
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_038
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_039
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_040
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_041
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_042
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_043
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_044
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_045
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_046
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_047
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_048
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_049
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_050
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
-- key: flag_044
- name: FLAG_044
- type: VARIANT_FLAG_TYPE
- description: Some Description
- enabled: true
- variants:
- - key: variant_001
- name: VARIANT_001
- - key: variant_002
- name: VARIANT_002
- rules:
- - segment: segment_001
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_002
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_003
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_004
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_005
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_006
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_007
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_008
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_009
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_010
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_011
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_012
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_013
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_014
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_015
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_016
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_017
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_018
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_019
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_020
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_021
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_022
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_023
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_024
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_025
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_026
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_027
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_028
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_029
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_030
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_031
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_032
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_033
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_034
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_035
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_036
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_037
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_038
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_039
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_040
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_041
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_042
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_043
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_044
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_045
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_046
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_047
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_048
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_049
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_050
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
-- key: flag_045
- name: FLAG_045
- type: VARIANT_FLAG_TYPE
- description: Some Description
- enabled: true
- variants:
- - key: variant_001
- name: VARIANT_001
- - key: variant_002
- name: VARIANT_002
- rules:
- - segment: segment_001
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_002
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_003
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_004
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_005
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_006
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_007
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_008
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_009
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_010
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_011
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_012
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_013
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_014
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_015
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_016
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_017
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_018
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_019
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_020
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_021
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_022
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_023
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_024
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_025
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_026
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_027
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_028
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_029
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_030
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_031
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_032
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_033
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_034
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_035
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_036
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_037
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_038
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_039
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_040
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_041
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_042
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_043
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_044
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_045
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_046
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_047
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_048
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_049
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_050
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
-- key: flag_046
- name: FLAG_046
- type: VARIANT_FLAG_TYPE
- description: Some Description
- enabled: true
- variants:
- - key: variant_001
- name: VARIANT_001
- - key: variant_002
- name: VARIANT_002
- rules:
- - segment: segment_001
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_002
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_003
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_004
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_005
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_006
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_007
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_008
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_009
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_010
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_011
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_012
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_013
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_014
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_015
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_016
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_017
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_018
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_019
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_020
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_021
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_022
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_023
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_024
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_025
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_026
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_027
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_028
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_029
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_030
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_031
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_032
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_033
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_034
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_035
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_036
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_037
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_038
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_039
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_040
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_041
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_042
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_043
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_044
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_045
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_046
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_047
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_048
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_049
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_050
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
-- key: flag_047
- name: FLAG_047
- type: VARIANT_FLAG_TYPE
- description: Some Description
- enabled: true
- variants:
- - key: variant_001
- name: VARIANT_001
- - key: variant_002
- name: VARIANT_002
- rules:
- - segment: segment_001
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_002
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_003
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_004
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_005
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_006
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_007
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_008
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_009
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_010
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_011
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_012
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_013
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_014
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_015
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_016
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_017
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_018
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_019
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_020
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_021
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_022
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_023
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_024
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_025
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_026
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_027
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_028
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_029
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_030
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_031
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_032
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_033
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_034
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_035
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_036
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_037
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_038
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_039
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_040
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_041
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_042
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_043
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_044
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_045
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_046
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_047
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_048
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_049
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_050
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
-- key: flag_048
- name: FLAG_048
- type: VARIANT_FLAG_TYPE
- description: Some Description
- enabled: true
- variants:
- - key: variant_001
- name: VARIANT_001
- - key: variant_002
- name: VARIANT_002
- rules:
- - segment: segment_001
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_002
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_003
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_004
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_005
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_006
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_007
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_008
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_009
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_010
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_011
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_012
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_013
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_014
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_015
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_016
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_017
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_018
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_019
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_020
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_021
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_022
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_023
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_024
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_025
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_026
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_027
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_028
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_029
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_030
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_031
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_032
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_033
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_034
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_035
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_036
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_037
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_038
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_039
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_040
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_041
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_042
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_043
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_044
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_045
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_046
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_047
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_048
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_049
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_050
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
-- key: flag_049
- name: FLAG_049
- type: VARIANT_FLAG_TYPE
- description: Some Description
- enabled: true
- variants:
- - key: variant_001
- name: VARIANT_001
- - key: variant_002
- name: VARIANT_002
- rules:
- - segment: segment_001
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_002
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_003
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_004
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_005
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_006
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_007
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_008
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_009
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_010
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_011
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_012
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_013
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_014
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_015
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_016
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_017
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_018
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_019
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_020
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_021
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_022
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_023
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_024
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_025
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_026
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_027
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_028
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_029
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_030
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_031
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_032
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_033
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_034
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_035
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_036
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_037
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_038
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_039
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_040
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_041
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_042
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_043
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_044
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_045
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_046
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_047
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_048
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_049
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_050
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
-- key: flag_050
- name: FLAG_050
- type: VARIANT_FLAG_TYPE
- description: Some Description
- enabled: true
- variants:
- - key: variant_001
- name: VARIANT_001
- - key: variant_002
- name: VARIANT_002
- rules:
- - segment: segment_001
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_002
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_003
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_004
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_005
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_006
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_007
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_008
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_009
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_010
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_011
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_012
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_013
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_014
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_015
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_016
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_017
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_018
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_019
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_020
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_021
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_022
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_023
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_024
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_025
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_026
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_027
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_028
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_029
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_030
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_031
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_032
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_033
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_034
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_035
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_036
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_037
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_038
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_039
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_040
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_041
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_042
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_043
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_044
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_045
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_046
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_047
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_048
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_049
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
- - segment: segment_050
- distributions:
- - variant: variant_001
- rollout: 50
- - variant: variant_002
- rollout: 50
-- key: flag_boolean
- name: FLAG_BOOLEAN
- type: BOOLEAN_FLAG_TYPE
- description: Boolean Flag Description
- enabled: false
- rollouts:
- - description: enabled for segment_001
- segment:
- key: segment_001
- value: true
- - description: enabled for 50%
- threshold:
- percentage: 50
- value: true
- - description: disabled for segment_002
- segment:
- key: segment_002
- - description: enabled for segment_003
- segment:
- key: segment_003
- value: true
- - description: disabled for segment_004
- segment:
- key: segment_004
-- key: another_boolean_flag
- name: FLAG_BOOLEAN
- type: BOOLEAN_FLAG_TYPE
- description: Boolean Flag Description
- enabled: false
- rollouts:
- - description: enabled for segment_001
- segment:
- key: segment_001
- value: true
- - description: enabled for 50%
- threshold:
- percentage: 50
- value: true
- - description: enabled for segment_003
- segment:
- key: segment_003
- value: true
- - description: enabled for 70%
- threshold:
- percentage: 70
- value: false
- - description: disabled for segment_002
- segment:
- key: segment_002
- - description: disabled for segment_004
- segment:
- key: segment_004
-- key: flag_disabled
- name: FLAG_DISABLED
- type: VARIANT_FLAG_TYPE
- description: Disabled Flag Description
- enabled: false
-segments:
-- key: segment_001
- name: SEGMENT_001
- description: Some Segment Description
- constraints:
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_001
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_001
- match_type: ALL_MATCH_TYPE
-- key: segment_002
- name: SEGMENT_002
- description: Some Segment Description
- constraints:
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_002
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_002
- match_type: ALL_MATCH_TYPE
-- key: segment_003
- name: SEGMENT_003
- description: Some Segment Description
- constraints:
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_003
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_003
- match_type: ALL_MATCH_TYPE
-- key: segment_004
- name: SEGMENT_004
- description: Some Segment Description
- constraints:
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_004
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_004
- match_type: ALL_MATCH_TYPE
-- key: segment_005
- name: SEGMENT_005
- description: Some Segment Description
- constraints:
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_005
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_005
- match_type: ALL_MATCH_TYPE
-- key: segment_006
- name: SEGMENT_006
- description: Some Segment Description
- constraints:
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_006
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_006
- match_type: ALL_MATCH_TYPE
-- key: segment_007
- name: SEGMENT_007
- description: Some Segment Description
- constraints:
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_007
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_007
- match_type: ALL_MATCH_TYPE
-- key: segment_008
- name: SEGMENT_008
- description: Some Segment Description
- constraints:
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_008
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_008
- match_type: ALL_MATCH_TYPE
-- key: segment_009
- name: SEGMENT_009
- description: Some Segment Description
- constraints:
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_009
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_009
- match_type: ALL_MATCH_TYPE
-- key: segment_010
- name: SEGMENT_010
- description: Some Segment Description
- constraints:
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_010
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_010
- match_type: ALL_MATCH_TYPE
-- key: segment_011
- name: SEGMENT_011
- description: Some Segment Description
- constraints:
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_011
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_011
- match_type: ALL_MATCH_TYPE
-- key: segment_012
- name: SEGMENT_012
- description: Some Segment Description
- constraints:
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_012
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_012
- match_type: ALL_MATCH_TYPE
-- key: segment_013
- name: SEGMENT_013
- description: Some Segment Description
- constraints:
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_013
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_013
- match_type: ALL_MATCH_TYPE
-- key: segment_014
- name: SEGMENT_014
- description: Some Segment Description
- constraints:
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_014
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_014
- match_type: ALL_MATCH_TYPE
-- key: segment_015
- name: SEGMENT_015
- description: Some Segment Description
- constraints:
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_015
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_015
- match_type: ALL_MATCH_TYPE
-- key: segment_016
- name: SEGMENT_016
- description: Some Segment Description
- constraints:
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_016
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_016
- match_type: ALL_MATCH_TYPE
-- key: segment_017
- name: SEGMENT_017
- description: Some Segment Description
- constraints:
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_017
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_017
- match_type: ALL_MATCH_TYPE
-- key: segment_018
- name: SEGMENT_018
- description: Some Segment Description
- constraints:
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_018
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_018
- match_type: ALL_MATCH_TYPE
-- key: segment_019
- name: SEGMENT_019
- description: Some Segment Description
- constraints:
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_019
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_019
- match_type: ALL_MATCH_TYPE
-- key: segment_020
- name: SEGMENT_020
- description: Some Segment Description
- constraints:
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_020
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_020
- match_type: ALL_MATCH_TYPE
-- key: segment_021
- name: SEGMENT_021
- description: Some Segment Description
- constraints:
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_021
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_021
- match_type: ALL_MATCH_TYPE
-- key: segment_022
- name: SEGMENT_022
- description: Some Segment Description
- constraints:
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_022
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_022
- match_type: ALL_MATCH_TYPE
-- key: segment_023
- name: SEGMENT_023
- description: Some Segment Description
- constraints:
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_023
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_023
- match_type: ALL_MATCH_TYPE
-- key: segment_024
- name: SEGMENT_024
- description: Some Segment Description
- constraints:
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_024
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_024
- match_type: ALL_MATCH_TYPE
-- key: segment_025
- name: SEGMENT_025
- description: Some Segment Description
- constraints:
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_025
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_025
- match_type: ALL_MATCH_TYPE
-- key: segment_026
- name: SEGMENT_026
- description: Some Segment Description
- constraints:
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_026
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_026
- match_type: ALL_MATCH_TYPE
-- key: segment_027
- name: SEGMENT_027
- description: Some Segment Description
- constraints:
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_027
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_027
- match_type: ALL_MATCH_TYPE
-- key: segment_028
- name: SEGMENT_028
- description: Some Segment Description
- constraints:
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_028
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_028
- match_type: ALL_MATCH_TYPE
-- key: segment_029
- name: SEGMENT_029
- description: Some Segment Description
- constraints:
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_029
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_029
- match_type: ALL_MATCH_TYPE
-- key: segment_030
- name: SEGMENT_030
- description: Some Segment Description
- constraints:
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_030
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_030
- match_type: ALL_MATCH_TYPE
-- key: segment_031
- name: SEGMENT_031
- description: Some Segment Description
- constraints:
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_031
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_031
- match_type: ALL_MATCH_TYPE
-- key: segment_032
- name: SEGMENT_032
- description: Some Segment Description
- constraints:
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_032
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_032
- match_type: ALL_MATCH_TYPE
-- key: segment_033
- name: SEGMENT_033
- description: Some Segment Description
- constraints:
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_033
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_033
- match_type: ALL_MATCH_TYPE
-- key: segment_034
- name: SEGMENT_034
- description: Some Segment Description
- constraints:
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_034
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_034
- match_type: ALL_MATCH_TYPE
-- key: segment_035
- name: SEGMENT_035
- description: Some Segment Description
- constraints:
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_035
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_035
- match_type: ALL_MATCH_TYPE
-- key: segment_036
- name: SEGMENT_036
- description: Some Segment Description
- constraints:
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_036
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_036
- match_type: ALL_MATCH_TYPE
-- key: segment_037
- name: SEGMENT_037
- description: Some Segment Description
- constraints:
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_037
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_037
- match_type: ALL_MATCH_TYPE
-- key: segment_038
- name: SEGMENT_038
- description: Some Segment Description
- constraints:
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_038
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_038
- match_type: ALL_MATCH_TYPE
-- key: segment_039
- name: SEGMENT_039
- description: Some Segment Description
- constraints:
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_039
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_039
- match_type: ALL_MATCH_TYPE
-- key: segment_040
- name: SEGMENT_040
- description: Some Segment Description
- constraints:
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_040
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_040
- match_type: ALL_MATCH_TYPE
-- key: segment_041
- name: SEGMENT_041
- description: Some Segment Description
- constraints:
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_041
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_041
- match_type: ALL_MATCH_TYPE
-- key: segment_042
- name: SEGMENT_042
- description: Some Segment Description
- constraints:
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_042
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_042
- match_type: ALL_MATCH_TYPE
-- key: segment_043
- name: SEGMENT_043
- description: Some Segment Description
- constraints:
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_043
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_043
- match_type: ALL_MATCH_TYPE
-- key: segment_044
- name: SEGMENT_044
- description: Some Segment Description
- constraints:
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_044
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_044
- match_type: ALL_MATCH_TYPE
-- key: segment_045
- name: SEGMENT_045
- description: Some Segment Description
- constraints:
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_045
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_045
- match_type: ALL_MATCH_TYPE
-- key: segment_046
- name: SEGMENT_046
- description: Some Segment Description
- constraints:
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_046
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_046
- match_type: ALL_MATCH_TYPE
-- key: segment_047
- name: SEGMENT_047
- description: Some Segment Description
- constraints:
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_047
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_047
- match_type: ALL_MATCH_TYPE
-- key: segment_048
- name: SEGMENT_048
- description: Some Segment Description
- constraints:
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_048
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_048
- match_type: ALL_MATCH_TYPE
-- key: segment_049
- name: SEGMENT_049
- description: Some Segment Description
- constraints:
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_049
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_049
- match_type: ALL_MATCH_TYPE
-- key: segment_050
- name: SEGMENT_050
- description: Some Segment Description
- constraints:
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_050
- - type: STRING_COMPARISON_TYPE
- property: in_segment
- operator: eq
- value: segment_050
- match_type: ALL_MATCH_TYPE
diff --git a/internal/storage/sql/testing/testing.go b/internal/storage/sql/testing/testing.go
deleted file mode 100644
index 22d5722d4c..0000000000
--- a/internal/storage/sql/testing/testing.go
+++ /dev/null
@@ -1,336 +0,0 @@
-package testing
-
-import (
- "context"
- "database/sql"
- "errors"
- "fmt"
- "log"
- "os"
- "strconv"
- "strings"
- "time"
-
- "github.com/docker/go-connections/nat"
- "github.com/golang-migrate/migrate/v4"
- "github.com/golang-migrate/migrate/v4/database"
- "github.com/golang-migrate/migrate/v4/database/cockroachdb"
- "github.com/testcontainers/testcontainers-go"
- "github.com/testcontainers/testcontainers-go/wait"
- "go.flipt.io/flipt/config/migrations"
- "go.flipt.io/flipt/internal/config"
- fliptsql "go.flipt.io/flipt/internal/storage/sql"
-
- ms "github.com/golang-migrate/migrate/v4/database/mysql"
- pg "github.com/golang-migrate/migrate/v4/database/postgres"
- "github.com/golang-migrate/migrate/v4/database/sqlite3"
- _ "github.com/golang-migrate/migrate/v4/source/file"
- "github.com/golang-migrate/migrate/v4/source/iofs"
-)
-
-const defaultTestDBPrefix = "flipt_*.db"
-
-type Database struct {
- DB *sql.DB
- Driver fliptsql.Driver
- Container *DBContainer
-
- cleanup func()
-}
-
-func (d *Database) Shutdown(ctx context.Context) {
- if d.DB != nil {
- d.DB.Close()
- }
-
- if d.Container != nil {
- _ = d.Container.StopLogProducer()
- _ = d.Container.Terminate(ctx)
- }
-
- if d.cleanup != nil {
- d.cleanup()
- }
-}
-
-func Open() (*Database, error) {
- var proto config.DatabaseProtocol
-
- switch os.Getenv("FLIPT_TEST_DATABASE_PROTOCOL") {
- case "cockroachdb", "cockroach":
- proto = config.DatabaseCockroachDB
- case "postgres":
- proto = config.DatabasePostgres
- case "mysql":
- proto = config.DatabaseMySQL
- case "libsql":
- proto = config.DatabaseLibSQL
- default:
- proto = config.DatabaseSQLite
- }
-
- cfg := config.Config{
- Database: config.DatabaseConfig{
- Protocol: proto,
- },
- }
-
- var (
- username, password, dbName string
- useTestContainer bool
- cleanup func()
- )
-
- if url := os.Getenv("FLIPT_TEST_DB_URL"); len(url) > 0 {
- // FLIPT_TEST_DB_URL takes precedent if set.
- // It assumes the database is already running at the target URL.
- // It does not attempt to create an instance of the DB or do any cleanup.
- cfg.Database.URL = url
- } else {
- // Otherwise, depending on the value of FLIPT_TEST_DATABASE_PROTOCOL a test database
- // is created and destroyed for the lifecycle of the test.
- switch proto {
- case config.DatabaseSQLite:
- dbPath := createTempDBPath()
- cfg.Database.URL = "file:" + dbPath
- cleanup = func() {
- _ = os.Remove(dbPath)
- }
- case config.DatabaseLibSQL:
- dbPath := createTempDBPath()
- cfg.Database.URL = "libsql://file:" + dbPath
- cleanup = func() {
- _ = os.Remove(dbPath)
- }
- case config.DatabaseCockroachDB:
- useTestContainer = true
- username = "root"
- password = ""
- dbName = "defaultdb"
- default:
- useTestContainer = true
- username = "flipt"
- password = "password"
- dbName = "flipt_test"
- }
- }
-
- var (
- container *DBContainer
- err error
- )
-
- if useTestContainer {
- container, err = NewDBContainer(context.Background(), proto)
- if err != nil {
- return nil, fmt.Errorf("creating db container: %w", err)
- }
-
- cfg.Database.URL = ""
- cfg.Database.Host = container.Host
- cfg.Database.Port = container.Port
- cfg.Database.Name = dbName
- cfg.Database.User = username
- cfg.Database.Password = password
- cfg.Database.ConnMaxLifetime = 1 * time.Minute
- }
-
- db, driver, err := fliptsql.Open(cfg, fliptsql.WithMigrate, fliptsql.WithSSLDisabled)
- if err != nil {
- return nil, fmt.Errorf("opening db: %w", err)
- }
-
- mm, err := newMigrator(db, driver)
- if err != nil {
- return nil, fmt.Errorf("creating migrate instance: %w", err)
- }
-
- // run drop to clear target DB (incase we're reusing)
- if err := mm.Drop(); err != nil && !errors.Is(err, migrate.ErrNoChange) {
- return nil, fmt.Errorf("running drop: %w", err)
- }
-
- if err := db.Close(); err != nil {
- return nil, fmt.Errorf("closing db: %w", err)
- }
-
- // need new instance after drop
- db, driver, err = fliptsql.Open(cfg, fliptsql.WithMigrate, fliptsql.WithSSLDisabled)
- if err != nil {
- return nil, fmt.Errorf("opening db: %w", err)
- }
-
- mm, err = newMigrator(db, driver)
- if err != nil {
- return nil, fmt.Errorf("creating migrate instance: %w", err)
- }
-
- if err := mm.Up(); err != nil && !errors.Is(err, migrate.ErrNoChange) {
- return nil, fmt.Errorf("running migrations: %w", err)
- }
-
- if err := db.Close(); err != nil {
- return nil, fmt.Errorf("closing db: %w", err)
- }
-
- // re-open db and enable ANSI mode for MySQL
- db, driver, err = fliptsql.Open(cfg, fliptsql.WithSSLDisabled)
- if err != nil {
- return nil, fmt.Errorf("opening db: %w", err)
- }
-
- db.SetConnMaxLifetime(2 * time.Minute)
- db.SetConnMaxIdleTime(time.Minute)
-
- // 2 minute timeout attempting to establish first connection
- ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute)
- defer cancel()
- if err := db.PingContext(ctx); err != nil {
- return nil, err
- }
-
- return &Database{
- DB: db,
- Driver: driver,
- Container: container,
- cleanup: cleanup,
- }, nil
-}
-
-func newMigrator(db *sql.DB, driver fliptsql.Driver) (*migrate.Migrate, error) {
- var (
- dr database.Driver
- err error
- )
-
- switch driver {
- case fliptsql.SQLite, fliptsql.LibSQL:
- dr, err = sqlite3.WithInstance(db, &sqlite3.Config{})
- case fliptsql.Postgres:
- dr, err = pg.WithInstance(db, &pg.Config{})
- case fliptsql.CockroachDB:
- dr, err = cockroachdb.WithInstance(db, &cockroachdb.Config{})
- case fliptsql.MySQL:
- dr, err = ms.WithInstance(db, &ms.Config{})
-
- default:
- return nil, fmt.Errorf("unknown driver: %s", driver)
- }
-
- if err != nil {
- return nil, fmt.Errorf("creating driver: %w", err)
- }
-
- // source migrations from embedded config/migrations package
- // relative to the specific driver
- sourceDriver, err := iofs.New(migrations.FS, driver.Migrations())
- if err != nil {
- return nil, fmt.Errorf("constructing migration source driver (db driver %q): %w", driver.String(), err)
- }
-
- return migrate.NewWithInstance("iofs", sourceDriver, driver.Migrations(), dr)
-}
-
-type DBContainer struct {
- testcontainers.Container
- Host string
- Port int
-}
-
-func NewDBContainer(ctx context.Context, proto config.DatabaseProtocol) (*DBContainer, error) {
- var (
- req testcontainers.ContainerRequest
- port nat.Port
- )
-
- switch proto {
- case config.DatabasePostgres:
- port = nat.Port("5432/tcp")
- req = testcontainers.ContainerRequest{
- Image: "postgres:11.2",
- ExposedPorts: []string{"5432/tcp"},
- WaitingFor: wait.ForSQL(port, "postgres", func(host string, port nat.Port) string {
- return fmt.Sprintf("postgres://flipt:password@%s:%s/flipt_test?sslmode=disable", host, port.Port())
- }),
- Env: map[string]string{
- "POSTGRES_USER": "flipt",
- "POSTGRES_PASSWORD": "password",
- "POSTGRES_DB": "flipt_test",
- },
- }
- case config.DatabaseCockroachDB:
- port = nat.Port("26257/tcp")
- req = testcontainers.ContainerRequest{
- Image: "cockroachdb/cockroach:latest-v21.2",
- ExposedPorts: []string{"26257/tcp", "8080/tcp"},
- WaitingFor: wait.ForSQL(port, "postgres", func(host string, port nat.Port) string {
- return fmt.Sprintf("postgres://root@%s:%s/defaultdb?sslmode=disable", host, port.Port())
- }),
- Env: map[string]string{
- "COCKROACH_USER": "root",
- "COCKROACH_DATABASE": "defaultdb",
- },
- Cmd: []string{"start-single-node", "--insecure"},
- }
- case config.DatabaseMySQL:
- port = nat.Port("3306/tcp")
- req = testcontainers.ContainerRequest{
- Image: "mysql:8",
- ExposedPorts: []string{"3306/tcp"},
- WaitingFor: wait.ForSQL(port, "mysql", func(host string, port nat.Port) string {
- return fmt.Sprintf("flipt:password@tcp(%s:%s)/flipt_test?multiStatements=true", host, port.Port())
- }),
- Env: map[string]string{
- "MYSQL_USER": "flipt",
- "MYSQL_PASSWORD": "password",
- "MYSQL_DATABASE": "flipt_test",
- "MYSQL_ALLOW_EMPTY_PASSWORD": "true",
- },
- }
- }
-
- container, err := testcontainers.GenericContainer(ctx, testcontainers.GenericContainerRequest{
- ContainerRequest: req,
- Started: true,
- })
- if err != nil {
- return nil, err
- }
-
- if err := container.StartLogProducer(ctx); err != nil {
- return nil, err
- }
-
- verbose, _ := strconv.ParseBool(strings.TrimSpace(os.Getenv("FLIPT_TEST_DATABASE_VERBOSE")))
- if verbose {
- var logger testContainerLogger
- container.FollowOutput(&logger)
- }
-
- mappedPort, err := container.MappedPort(ctx, port)
- if err != nil {
- return nil, err
- }
-
- hostIP, err := container.Host(ctx)
- if err != nil {
- return nil, err
- }
-
- return &DBContainer{Container: container, Host: hostIP, Port: mappedPort.Int()}, nil
-}
-
-type testContainerLogger struct{}
-
-func (t testContainerLogger) Accept(entry testcontainers.Log) {
- log.Println(entry.LogType, ":", string(entry.Content))
-}
-
-func createTempDBPath() string {
- fi, err := os.CreateTemp("", defaultTestDBPrefix)
- if err != nil {
- panic(err)
- }
- _ = fi.Close()
- return fi.Name()
-}
diff --git a/internal/telemetry/telemetry.go b/internal/telemetry/telemetry.go
index ca9142101c..4c812821e7 100644
--- a/internal/telemetry/telemetry.go
+++ b/internal/telemetry/telemetry.go
@@ -12,7 +12,6 @@ import (
"time"
"github.com/google/uuid"
- "github.com/xo/dburl"
"go.flipt.io/flipt/internal/config"
"go.flipt.io/flipt/internal/info"
"go.uber.org/zap"
@@ -32,9 +31,8 @@ type ping struct {
}
type storage struct {
- Type string `json:"type,omitempty"`
- Database string `json:"database,omitempty"`
- Cache string `json:"cache,omitempty"`
+ Type string `json:"type,omitempty"`
+ Cache string `json:"cache,omitempty"`
}
type audit struct {
@@ -200,21 +198,8 @@ func (r *Reporter) ping(_ context.Context, f file) error {
}
)
- dbProtocol := r.cfg.Database.Protocol.String()
-
- if dbProtocol == "" && r.cfg.Database.URL != "" {
- dbProtocol = "unknown"
-
- url, err := dburl.Parse(r.cfg.Database.URL)
- if err == nil {
- // just swallow the error, we don't want to fail telemetry reporting
- dbProtocol = url.Scheme
- }
- }
-
flipt.Storage = &storage{
- Type: string(r.cfg.Storage.Type),
- Database: dbProtocol,
+ Type: string(r.cfg.Storage.Type),
}
// only report cache if enabled
diff --git a/internal/telemetry/telemetry_test.go b/internal/telemetry/telemetry_test.go
index 354b448af7..6e946eecea 100644
--- a/internal/telemetry/telemetry_test.go
+++ b/internal/telemetry/telemetry_test.go
@@ -96,104 +96,17 @@ func TestPing(t *testing.T) {
}{
{
name: "basic",
- cfg: config.Config{
- Database: config.DatabaseConfig{
- Protocol: config.DatabaseSQLite,
- },
- },
- want: map[string]any{
- "version": "1.0.0",
- "os": "linux",
- "arch": "amd64",
- "storage": map[string]any{
- "database": "sqlite",
- },
- "experimental": experimental,
- },
- },
- {
- name: "with db url",
- cfg: config.Config{
- Database: config.DatabaseConfig{
- URL: "sqlite:///foo.db",
- },
- },
- want: map[string]any{
- "version": "1.0.0",
- "os": "linux",
- "arch": "amd64",
- "storage": map[string]any{
- "database": "sqlite",
- },
- "experimental": experimental,
- },
- },
- {
- name: "with unknown db url",
- cfg: config.Config{
- Database: config.DatabaseConfig{
- URL: "foo:///foo.db",
- },
- },
- want: map[string]any{
- "version": "1.0.0",
- "os": "linux",
- "arch": "amd64",
- "storage": map[string]any{
- "database": "unknown",
- },
- "experimental": experimental,
- },
- },
- {
- name: "with cache not enabled",
- cfg: config.Config{
- Database: config.DatabaseConfig{
- Protocol: config.DatabaseSQLite,
- },
- Cache: config.CacheConfig{
- Enabled: false,
- Backend: config.CacheRedis,
- },
- },
- want: map[string]any{
- "version": "1.0.0",
- "os": "linux",
- "arch": "amd64",
- "storage": map[string]any{
- "database": "sqlite",
- },
- "experimental": experimental,
- },
- },
- {
- name: "with cache",
- cfg: config.Config{
- Database: config.DatabaseConfig{
- Protocol: config.DatabaseSQLite,
- },
- Cache: config.CacheConfig{
- Enabled: true,
- Backend: config.CacheRedis,
- },
- },
+
want: map[string]any{
- "version": "1.0.0",
- "os": "linux",
- "arch": "amd64",
- "storage": map[string]any{
- "database": "sqlite",
- "cache": "redis",
- },
+ "version": "1.0.0",
+ "os": "linux",
+ "arch": "amd64",
"experimental": experimental,
},
},
{
name: "with auth not enabled",
cfg: config.Config{
- Database: config.DatabaseConfig{
- Protocol: config.DatabaseSQLite,
- },
Authentication: config.AuthenticationConfig{
Required: false,
Methods: config.AuthenticationMethods{
@@ -204,21 +117,16 @@ func TestPing(t *testing.T) {
},
},
want: map[string]any{
- "version": "1.0.0",
- "os": "linux",
- "arch": "amd64",
- "storage": map[string]any{
- "database": "sqlite",
- },
+ "version": "1.0.0",
+ "os": "linux",
+ "arch": "amd64",
+ "storage": map[string]any{},
"experimental": experimental,
},
},
{
name: "with auth",
cfg: config.Config{
- Database: config.DatabaseConfig{
- Protocol: config.DatabaseSQLite,
- },
Authentication: config.AuthenticationConfig{
Required: false,
Methods: config.AuthenticationMethods{
@@ -232,9 +140,7 @@ func TestPing(t *testing.T) {
"version": "1.0.0",
"os": "linux",
"arch": "amd64",
- "storage": map[string]any{
- "database": "sqlite",
- },
+ "storage": map[string]any{},
"authentication": map[string]any{
"methods": []any{
"token",
@@ -243,171 +149,25 @@ func TestPing(t *testing.T) {
"experimental": experimental,
},
},
- {
- name: "with audit logfile disabled",
- cfg: config.Config{
- Database: config.DatabaseConfig{
- Protocol: config.DatabaseSQLite,
- },
- Audit: config.AuditConfig{
- Sinks: config.SinksConfig{
- Log: config.LogSinkConfig{
- Enabled: false,
- },
- },
- },
- },
- want: map[string]any{
- "version": "1.0.0",
- "os": "linux",
- "arch": "amd64",
- "storage": map[string]any{
- "database": "sqlite",
- },
- "experimental": experimental,
- },
- },
- {
- name: "with audit logfile enabled",
- cfg: config.Config{
- Database: config.DatabaseConfig{
- Protocol: config.DatabaseSQLite,
- },
- Audit: config.AuditConfig{
- Sinks: config.SinksConfig{
- Log: config.LogSinkConfig{
- Enabled: true,
- },
- },
- },
- },
- want: map[string]any{
- "version": "1.0.0",
- "os": "linux",
- "arch": "amd64",
- "storage": map[string]any{
- "database": "sqlite",
- },
- "audit": map[string]any{
- "sinks": []any{
- "log",
- },
- },
- "experimental": experimental,
- },
- },
- {
- name: "with audit webhook disabled",
- cfg: config.Config{
- Database: config.DatabaseConfig{
- Protocol: config.DatabaseSQLite,
- },
- Audit: config.AuditConfig{
- Sinks: config.SinksConfig{
- Webhook: config.WebhookSinkConfig{
- Enabled: false,
- },
- },
- },
- },
- want: map[string]any{
- "version": "1.0.0",
- "os": "linux",
- "arch": "amd64",
- "storage": map[string]any{
- "database": "sqlite",
- },
- "experimental": experimental,
- },
- },
- {
- name: "with audit webhook enabled",
- cfg: config.Config{
- Database: config.DatabaseConfig{
- Protocol: config.DatabaseSQLite,
- },
- Audit: config.AuditConfig{
- Sinks: config.SinksConfig{
- Webhook: config.WebhookSinkConfig{
- Enabled: true,
- },
- },
- },
- },
- want: map[string]any{
- "version": "1.0.0",
- "os": "linux",
- "arch": "amd64",
- "storage": map[string]any{
- "database": "sqlite",
- },
- "audit": map[string]any{
- "sinks": []any{
- "webhook",
- },
- },
- "experimental": experimental,
- },
- },
- {
- name: "with audit logfile and webhook enabled",
- cfg: config.Config{
- Database: config.DatabaseConfig{
- Protocol: config.DatabaseSQLite,
- },
- Audit: config.AuditConfig{
- Sinks: config.SinksConfig{
- Log: config.LogSinkConfig{
- Enabled: true,
- },
- Webhook: config.WebhookSinkConfig{
- Enabled: true,
- },
- },
- },
- },
- want: map[string]any{
- "version": "1.0.0",
- "os": "linux",
- "arch": "amd64",
- "storage": map[string]any{
- "database": "sqlite",
- },
- "audit": map[string]any{
- "sinks": []any{
- "log", "webhook",
- },
- },
- "experimental": experimental,
- },
- },
{
name: "with tracing not enabled",
cfg: config.Config{
- Database: config.DatabaseConfig{
- Protocol: config.DatabaseSQLite,
- },
Tracing: config.TracingConfig{
Enabled: false,
Exporter: config.TracingOTLP,
},
},
want: map[string]any{
- "version": "1.0.0",
- "os": "linux",
- "arch": "amd64",
- "storage": map[string]any{
- "database": "sqlite",
- },
+ "version": "1.0.0",
+ "os": "linux",
+ "arch": "amd64",
+ "storage": map[string]any{},
"experimental": experimental,
},
},
{
name: "with tracing enabled",
cfg: config.Config{
- Database: config.DatabaseConfig{
- Protocol: config.DatabaseSQLite,
- },
Tracing: config.TracingConfig{
Enabled: true,
Exporter: config.TracingOTLP,
@@ -417,9 +177,7 @@ func TestPing(t *testing.T) {
"version": "1.0.0",
"os": "linux",
"arch": "amd64",
- "storage": map[string]any{
- "database": "sqlite",
- },
+ "storage": map[string]any{},
"tracing": map[string]any{
"exporter": "otlp",
},
@@ -429,9 +187,6 @@ func TestPing(t *testing.T) {
{
name: "with analytics not enabled",
cfg: config.Config{
- Database: config.DatabaseConfig{
- Protocol: config.DatabaseSQLite,
- },
Analytics: config.AnalyticsConfig{
Storage: config.AnalyticsStorageConfig{
Clickhouse: config.ClickhouseConfig{
@@ -442,21 +197,16 @@ func TestPing(t *testing.T) {
},
},
want: map[string]any{
- "version": "1.0.0",
- "os": "linux",
- "arch": "amd64",
- "storage": map[string]any{
- "database": "sqlite",
- },
+ "version": "1.0.0",
+ "os": "linux",
+ "arch": "amd64",
+ "storage": map[string]any{},
"experimental": experimental,
},
},
{
name: "with analytics enabled",
cfg: config.Config{
- Database: config.DatabaseConfig{
- Protocol: config.DatabaseSQLite,
- },
Analytics: config.AnalyticsConfig{
Storage: config.AnalyticsStorageConfig{
Clickhouse: config.ClickhouseConfig{
@@ -470,9 +220,7 @@ func TestPing(t *testing.T) {
"version": "1.0.0",
"os": "linux",
"arch": "amd64",
- "storage": map[string]any{
- "database": "sqlite",
- },
+ "storage": map[string]any{},
"analytics": map[string]any{
"storage": "clickhouse",
},
diff --git a/logos/cockroachdb.svg b/logos/cockroachdb.svg
deleted file mode 100644
index 6e8eca8e06..0000000000
--- a/logos/cockroachdb.svg
+++ /dev/null
@@ -1 +0,0 @@
-CL
\ No newline at end of file
diff --git a/logos/mysql.svg b/logos/mysql.svg
deleted file mode 100644
index 853c55f150..0000000000
--- a/logos/mysql.svg
+++ /dev/null
@@ -1 +0,0 @@
-
\ No newline at end of file
diff --git a/logos/postgresql.svg b/logos/postgresql.svg
deleted file mode 100644
index ae38cff586..0000000000
--- a/logos/postgresql.svg
+++ /dev/null
@@ -1 +0,0 @@
-
\ No newline at end of file
diff --git a/logos/sqlite.svg b/logos/sqlite.svg
deleted file mode 100644
index 670aa6eb82..0000000000
--- a/logos/sqlite.svg
+++ /dev/null
@@ -1 +0,0 @@
-
\ No newline at end of file
diff --git a/logos/turso.svg b/logos/turso.svg
deleted file mode 100644
index d23e0cc385..0000000000
--- a/logos/turso.svg
+++ /dev/null
@@ -1,8 +0,0 @@
-
-
-
-
-
-
-
-
\ No newline at end of file