diff --git a/.github/images/preview.png b/.github/images/preview.png
deleted file mode 100644
index 1cbbbac..0000000
Binary files a/.github/images/preview.png and /dev/null differ
diff --git a/.github/workflows/CODEOWNERS b/.github/workflows/CODEOWNERS
deleted file mode 100644
index bbd74e7..0000000
--- a/.github/workflows/CODEOWNERS
+++ /dev/null
@@ -1 +0,0 @@
-* @santiagosayshey
\ No newline at end of file
diff --git a/.github/workflows/beta-build.yml b/.github/workflows/beta-build.yml
deleted file mode 100644
index 9555722..0000000
--- a/.github/workflows/beta-build.yml
+++ /dev/null
@@ -1,57 +0,0 @@
-name: Build Beta Docker Image
-
-on:
- push:
- branches:
- - dev
- pull_request:
- branches:
- - dev
-
-jobs:
- build:
- runs-on: ubuntu-latest
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
-
- - name: Set up QEMU
- uses: docker/setup-qemu-action@v3
- with:
- platforms: linux/amd64,linux/arm64/v8
-
- - name: Set up Node.js
- uses: actions/setup-node@v4
- with:
- node-version: '18'
-
- - name: Build frontend
- working-directory: ./frontend
- run: |
- npm ci
- npm run build
-
- - name: Prepare dist directory
- run: |
- mkdir -p dist/backend dist/static
- cp -r frontend/dist/* dist/static/
- cp -r backend/* dist/backend/
- cp backend/requirements.txt dist/
-
- - name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v3
-
- - name: Login to Docker Hub
- if: github.event_name != 'pull_request'
- uses: docker/login-action@v3
- with:
- username: ${{ secrets.DOCKERHUB_USERNAME }}
- password: ${{ secrets.DOCKERHUB_TOKEN }}
-
- - name: Build and push
- uses: docker/build-push-action@v5
- with:
- platforms: linux/amd64,linux/arm64/v8
- context: .
- push: ${{ github.event_name != 'pull_request' }}
- tags: santiagosayshey/profilarr:beta
diff --git a/.github/workflows/notify.yml b/.github/workflows/notify.yml
deleted file mode 100644
index df8e7a1..0000000
--- a/.github/workflows/notify.yml
+++ /dev/null
@@ -1,11 +0,0 @@
-name: Release Notification
-
-on:
- release:
- types: [published]
-
-jobs:
- call-notify-release:
- uses: Dictionarry-Hub/parrot/.github/workflows/notify-release.yml@v1
- secrets:
- PARROT_URL: ${{ secrets.PARROT_URL }}
diff --git a/.github/workflows/release-build.yml b/.github/workflows/release-build.yml
deleted file mode 100644
index aebdf99..0000000
--- a/.github/workflows/release-build.yml
+++ /dev/null
@@ -1,59 +0,0 @@
-name: Build Release Docker Image
-
-on:
- push:
- tags:
- - 'v*'
-
-jobs:
- build:
- runs-on: ubuntu-latest
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
-
- - name: Get tag
- id: tag
- run: echo "tag=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT
-
- - name: Set up QEMU
- uses: docker/setup-qemu-action@v3
- with:
- platforms: linux/amd64,linux/arm64/v8
-
- - name: Set up Node.js
- uses: actions/setup-node@v4
- with:
- node-version: '18'
-
- - name: Build frontend
- working-directory: ./frontend
- run: |
- npm ci
- npm run build
-
- - name: Prepare dist directory
- run: |
- mkdir -p dist/backend dist/static
- cp -r frontend/dist/* dist/static/
- cp -r backend/* dist/backend/
- cp backend/requirements.txt dist/
-
- - name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v3
-
- - name: Login to Docker Hub
- uses: docker/login-action@v3
- with:
- username: ${{ secrets.DOCKERHUB_USERNAME }}
- password: ${{ secrets.DOCKERHUB_TOKEN }}
-
- - name: Build and push
- uses: docker/build-push-action@v5
- with:
- context: .
- platforms: linux/amd64,linux/arm64/v8
- push: true
- tags: |
- santiagosayshey/profilarr:latest
- santiagosayshey/profilarr:${{ steps.tag.outputs.tag }}
diff --git a/.gitignore b/.gitignore
deleted file mode 100644
index d6171fe..0000000
--- a/.gitignore
+++ /dev/null
@@ -1,26 +0,0 @@
-# Node
-node_modules/
-dist/
-
-# Python
-__pycache__/
-*.pyc
-
-# Environment variables
-.env
-.env.prod
-.env.1
-.env.2
-
-# OS files
-.DS_Store
-
-# build files
-backend/app/static/
-
-# Config data
-config/
-config-test/
-radarr-config/
-sonarr-config/
-test-data/
\ No newline at end of file
diff --git a/.prettierrc b/.prettierrc
deleted file mode 100644
index 89c5908..0000000
--- a/.prettierrc
+++ /dev/null
@@ -1,12 +0,0 @@
-{
- "tabWidth": 4,
- "useTabs": false,
- "printWidth": 80,
- "singleQuote": true,
- "trailingComma": "none",
- "bracketSpacing": false,
- "jsxSingleQuote": true,
- "arrowParens": "avoid",
- "proseWrap": "preserve",
- "bracketSameLine": true
-}
diff --git a/CLAUDE.md b/CLAUDE.md
deleted file mode 100644
index 22d3bf1..0000000
--- a/CLAUDE.md
+++ /dev/null
@@ -1,25 +0,0 @@
-# Profilarr Development Guide
-
-## Commands
-- **Frontend**: `cd frontend && npm run dev` - Start React dev server
-- **Backend**: `cd backend && gunicorn -b 0.0.0.0:5000 app.main:app` - Run Flask server
-- **Docker**: `docker compose up` - Start both frontend/backend in dev mode
-- **Lint**: `cd frontend && npx eslint 'src/**/*.{js,jsx}'` - Check frontend code style
-- **Build**: `cd frontend && npm run build` - Build for production
-
-## Code Style
-### Frontend (React)
-- **Imports**: React first, third-party libs next, components, then utils
-- **Components**: Functional components with hooks, PascalCase naming
-- **Props**: PropTypes for validation, destructure props in component signature
-- **State**: Group related state, useCallback for memoized handlers
-- **JSX**: 4-space indentation, attributes on new lines for readability
-- **Error Handling**: try/catch for async operations, toast notifications
-
-### Backend (Python)
-- **Imports**: Standard lib first, third-party next, local modules last
-- **Naming**: snake_case for functions/vars/files, PascalCase for classes
-- **Functions**: Single responsibility, descriptive docstrings
-- **Error Handling**: Specific exception catches, return (success, message) tuples
-- **Indentation**: 4 spaces consistently
-- **Modularity**: Related functionality grouped in directories
\ No newline at end of file
diff --git a/LICENSE b/LICENSE
deleted file mode 100644
index e72bfdd..0000000
--- a/LICENSE
+++ /dev/null
@@ -1,674 +0,0 @@
- GNU GENERAL PUBLIC LICENSE
- Version 3, 29 June 2007
-
- Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
- Everyone is permitted to copy and distribute verbatim copies
- of this license document, but changing it is not allowed.
-
- Preamble
-
- The GNU General Public License is a free, copyleft license for
-software and other kinds of works.
-
- The licenses for most software and other practical works are designed
-to take away your freedom to share and change the works. By contrast,
-the GNU General Public License is intended to guarantee your freedom to
-share and change all versions of a program--to make sure it remains free
-software for all its users. We, the Free Software Foundation, use the
-GNU General Public License for most of our software; it applies also to
-any other work released this way by its authors. You can apply it to
-your programs, too.
-
- When we speak of free software, we are referring to freedom, not
-price. Our General Public Licenses are designed to make sure that you
-have the freedom to distribute copies of free software (and charge for
-them if you wish), that you receive source code or can get it if you
-want it, that you can change the software or use pieces of it in new
-free programs, and that you know you can do these things.
-
- To protect your rights, we need to prevent others from denying you
-these rights or asking you to surrender the rights. Therefore, you have
-certain responsibilities if you distribute copies of the software, or if
-you modify it: responsibilities to respect the freedom of others.
-
- For example, if you distribute copies of such a program, whether
-gratis or for a fee, you must pass on to the recipients the same
-freedoms that you received. You must make sure that they, too, receive
-or can get the source code. And you must show them these terms so they
-know their rights.
-
- Developers that use the GNU GPL protect your rights with two steps:
-(1) assert copyright on the software, and (2) offer you this License
-giving you legal permission to copy, distribute and/or modify it.
-
- For the developers' and authors' protection, the GPL clearly explains
-that there is no warranty for this free software. For both users' and
-authors' sake, the GPL requires that modified versions be marked as
-changed, so that their problems will not be attributed erroneously to
-authors of previous versions.
-
- Some devices are designed to deny users access to install or run
-modified versions of the software inside them, although the manufacturer
-can do so. This is fundamentally incompatible with the aim of
-protecting users' freedom to change the software. The systematic
-pattern of such abuse occurs in the area of products for individuals to
-use, which is precisely where it is most unacceptable. Therefore, we
-have designed this version of the GPL to prohibit the practice for those
-products. If such problems arise substantially in other domains, we
-stand ready to extend this provision to those domains in future versions
-of the GPL, as needed to protect the freedom of users.
-
- Finally, every program is threatened constantly by software patents.
-States should not allow patents to restrict development and use of
-software on general-purpose computers, but in those that do, we wish to
-avoid the special danger that patents applied to a free program could
-make it effectively proprietary. To prevent this, the GPL assures that
-patents cannot be used to render the program non-free.
-
- The precise terms and conditions for copying, distribution and
-modification follow.
-
- TERMS AND CONDITIONS
-
- 0. Definitions.
-
- "This License" refers to version 3 of the GNU General Public License.
-
- "Copyright" also means copyright-like laws that apply to other kinds of
-works, such as semiconductor masks.
-
- "The Program" refers to any copyrightable work licensed under this
-License. Each licensee is addressed as "you". "Licensees" and
-"recipients" may be individuals or organizations.
-
- To "modify" a work means to copy from or adapt all or part of the work
-in a fashion requiring copyright permission, other than the making of an
-exact copy. The resulting work is called a "modified version" of the
-earlier work or a work "based on" the earlier work.
-
- A "covered work" means either the unmodified Program or a work based
-on the Program.
-
- To "propagate" a work means to do anything with it that, without
-permission, would make you directly or secondarily liable for
-infringement under applicable copyright law, except executing it on a
-computer or modifying a private copy. Propagation includes copying,
-distribution (with or without modification), making available to the
-public, and in some countries other activities as well.
-
- To "convey" a work means any kind of propagation that enables other
-parties to make or receive copies. Mere interaction with a user through
-a computer network, with no transfer of a copy, is not conveying.
-
- An interactive user interface displays "Appropriate Legal Notices"
-to the extent that it includes a convenient and prominently visible
-feature that (1) displays an appropriate copyright notice, and (2)
-tells the user that there is no warranty for the work (except to the
-extent that warranties are provided), that licensees may convey the
-work under this License, and how to view a copy of this License. If
-the interface presents a list of user commands or options, such as a
-menu, a prominent item in the list meets this criterion.
-
- 1. Source Code.
-
- The "source code" for a work means the preferred form of the work
-for making modifications to it. "Object code" means any non-source
-form of a work.
-
- A "Standard Interface" means an interface that either is an official
-standard defined by a recognized standards body, or, in the case of
-interfaces specified for a particular programming language, one that
-is widely used among developers working in that language.
-
- The "System Libraries" of an executable work include anything, other
-than the work as a whole, that (a) is included in the normal form of
-packaging a Major Component, but which is not part of that Major
-Component, and (b) serves only to enable use of the work with that
-Major Component, or to implement a Standard Interface for which an
-implementation is available to the public in source code form. A
-"Major Component", in this context, means a major essential component
-(kernel, window system, and so on) of the specific operating system
-(if any) on which the executable work runs, or a compiler used to
-produce the work, or an object code interpreter used to run it.
-
- The "Corresponding Source" for a work in object code form means all
-the source code needed to generate, install, and (for an executable
-work) run the object code and to modify the work, including scripts to
-control those activities. However, it does not include the work's
-System Libraries, or general-purpose tools or generally available free
-programs which are used unmodified in performing those activities but
-which are not part of the work. For example, Corresponding Source
-includes interface definition files associated with source files for
-the work, and the source code for shared libraries and dynamically
-linked subprograms that the work is specifically designed to require,
-such as by intimate data communication or control flow between those
-subprograms and other parts of the work.
-
- The Corresponding Source need not include anything that users
-can regenerate automatically from other parts of the Corresponding
-Source.
-
- The Corresponding Source for a work in source code form is that
-same work.
-
- 2. Basic Permissions.
-
- All rights granted under this License are granted for the term of
-copyright on the Program, and are irrevocable provided the stated
-conditions are met. This License explicitly affirms your unlimited
-permission to run the unmodified Program. The output from running a
-covered work is covered by this License only if the output, given its
-content, constitutes a covered work. This License acknowledges your
-rights of fair use or other equivalent, as provided by copyright law.
-
- You may make, run and propagate covered works that you do not
-convey, without conditions so long as your license otherwise remains
-in force. You may convey covered works to others for the sole purpose
-of having them make modifications exclusively for you, or provide you
-with facilities for running those works, provided that you comply with
-the terms of this License in conveying all material for which you do
-not control copyright. Those thus making or running the covered works
-for you must do so exclusively on your behalf, under your direction
-and control, on terms that prohibit them from making any copies of
-your copyrighted material outside their relationship with you.
-
- Conveying under any other circumstances is permitted solely under
-the conditions stated below. Sublicensing is not allowed; section 10
-makes it unnecessary.
-
- 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
-
- No covered work shall be deemed part of an effective technological
-measure under any applicable law fulfilling obligations under article
-11 of the WIPO copyright treaty adopted on 20 December 1996, or
-similar laws prohibiting or restricting circumvention of such
-measures.
-
- When you convey a covered work, you waive any legal power to forbid
-circumvention of technological measures to the extent such circumvention
-is effected by exercising rights under this License with respect to
-the covered work, and you disclaim any intention to limit operation or
-modification of the work as a means of enforcing, against the work's
-users, your or third parties' legal rights to forbid circumvention of
-technological measures.
-
- 4. Conveying Verbatim Copies.
-
- You may convey verbatim copies of the Program's source code as you
-receive it, in any medium, provided that you conspicuously and
-appropriately publish on each copy an appropriate copyright notice;
-keep intact all notices stating that this License and any
-non-permissive terms added in accord with section 7 apply to the code;
-keep intact all notices of the absence of any warranty; and give all
-recipients a copy of this License along with the Program.
-
- You may charge any price or no price for each copy that you convey,
-and you may offer support or warranty protection for a fee.
-
- 5. Conveying Modified Source Versions.
-
- You may convey a work based on the Program, or the modifications to
-produce it from the Program, in the form of source code under the
-terms of section 4, provided that you also meet all of these conditions:
-
- a) The work must carry prominent notices stating that you modified
- it, and giving a relevant date.
-
- b) The work must carry prominent notices stating that it is
- released under this License and any conditions added under section
- 7. This requirement modifies the requirement in section 4 to
- "keep intact all notices".
-
- c) You must license the entire work, as a whole, under this
- License to anyone who comes into possession of a copy. This
- License will therefore apply, along with any applicable section 7
- additional terms, to the whole of the work, and all its parts,
- regardless of how they are packaged. This License gives no
- permission to license the work in any other way, but it does not
- invalidate such permission if you have separately received it.
-
- d) If the work has interactive user interfaces, each must display
- Appropriate Legal Notices; however, if the Program has interactive
- interfaces that do not display Appropriate Legal Notices, your
- work need not make them do so.
-
- A compilation of a covered work with other separate and independent
-works, which are not by their nature extensions of the covered work,
-and which are not combined with it such as to form a larger program,
-in or on a volume of a storage or distribution medium, is called an
-"aggregate" if the compilation and its resulting copyright are not
-used to limit the access or legal rights of the compilation's users
-beyond what the individual works permit. Inclusion of a covered work
-in an aggregate does not cause this License to apply to the other
-parts of the aggregate.
-
- 6. Conveying Non-Source Forms.
-
- You may convey a covered work in object code form under the terms
-of sections 4 and 5, provided that you also convey the
-machine-readable Corresponding Source under the terms of this License,
-in one of these ways:
-
- a) Convey the object code in, or embodied in, a physical product
- (including a physical distribution medium), accompanied by the
- Corresponding Source fixed on a durable physical medium
- customarily used for software interchange.
-
- b) Convey the object code in, or embodied in, a physical product
- (including a physical distribution medium), accompanied by a
- written offer, valid for at least three years and valid for as
- long as you offer spare parts or customer support for that product
- model, to give anyone who possesses the object code either (1) a
- copy of the Corresponding Source for all the software in the
- product that is covered by this License, on a durable physical
- medium customarily used for software interchange, for a price no
- more than your reasonable cost of physically performing this
- conveying of source, or (2) access to copy the
- Corresponding Source from a network server at no charge.
-
- c) Convey individual copies of the object code with a copy of the
- written offer to provide the Corresponding Source. This
- alternative is allowed only occasionally and noncommercially, and
- only if you received the object code with such an offer, in accord
- with subsection 6b.
-
- d) Convey the object code by offering access from a designated
- place (gratis or for a charge), and offer equivalent access to the
- Corresponding Source in the same way through the same place at no
- further charge. You need not require recipients to copy the
- Corresponding Source along with the object code. If the place to
- copy the object code is a network server, the Corresponding Source
- may be on a different server (operated by you or a third party)
- that supports equivalent copying facilities, provided you maintain
- clear directions next to the object code saying where to find the
- Corresponding Source. Regardless of what server hosts the
- Corresponding Source, you remain obligated to ensure that it is
- available for as long as needed to satisfy these requirements.
-
- e) Convey the object code using peer-to-peer transmission, provided
- you inform other peers where the object code and Corresponding
- Source of the work are being offered to the general public at no
- charge under subsection 6d.
-
- A separable portion of the object code, whose source code is excluded
-from the Corresponding Source as a System Library, need not be
-included in conveying the object code work.
-
- A "User Product" is either (1) a "consumer product", which means any
-tangible personal property which is normally used for personal, family,
-or household purposes, or (2) anything designed or sold for incorporation
-into a dwelling. In determining whether a product is a consumer product,
-doubtful cases shall be resolved in favor of coverage. For a particular
-product received by a particular user, "normally used" refers to a
-typical or common use of that class of product, regardless of the status
-of the particular user or of the way in which the particular user
-actually uses, or expects or is expected to use, the product. A product
-is a consumer product regardless of whether the product has substantial
-commercial, industrial or non-consumer uses, unless such uses represent
-the only significant mode of use of the product.
-
- "Installation Information" for a User Product means any methods,
-procedures, authorization keys, or other information required to install
-and execute modified versions of a covered work in that User Product from
-a modified version of its Corresponding Source. The information must
-suffice to ensure that the continued functioning of the modified object
-code is in no case prevented or interfered with solely because
-modification has been made.
-
- If you convey an object code work under this section in, or with, or
-specifically for use in, a User Product, and the conveying occurs as
-part of a transaction in which the right of possession and use of the
-User Product is transferred to the recipient in perpetuity or for a
-fixed term (regardless of how the transaction is characterized), the
-Corresponding Source conveyed under this section must be accompanied
-by the Installation Information. But this requirement does not apply
-if neither you nor any third party retains the ability to install
-modified object code on the User Product (for example, the work has
-been installed in ROM).
-
- The requirement to provide Installation Information does not include a
-requirement to continue to provide support service, warranty, or updates
-for a work that has been modified or installed by the recipient, or for
-the User Product in which it has been modified or installed. Access to a
-network may be denied when the modification itself materially and
-adversely affects the operation of the network or violates the rules and
-protocols for communication across the network.
-
- Corresponding Source conveyed, and Installation Information provided,
-in accord with this section must be in a format that is publicly
-documented (and with an implementation available to the public in
-source code form), and must require no special password or key for
-unpacking, reading or copying.
-
- 7. Additional Terms.
-
- "Additional permissions" are terms that supplement the terms of this
-License by making exceptions from one or more of its conditions.
-Additional permissions that are applicable to the entire Program shall
-be treated as though they were included in this License, to the extent
-that they are valid under applicable law. If additional permissions
-apply only to part of the Program, that part may be used separately
-under those permissions, but the entire Program remains governed by
-this License without regard to the additional permissions.
-
- When you convey a copy of a covered work, you may at your option
-remove any additional permissions from that copy, or from any part of
-it. (Additional permissions may be written to require their own
-removal in certain cases when you modify the work.) You may place
-additional permissions on material, added by you to a covered work,
-for which you have or can give appropriate copyright permission.
-
- Notwithstanding any other provision of this License, for material you
-add to a covered work, you may (if authorized by the copyright holders of
-that material) supplement the terms of this License with terms:
-
- a) Disclaiming warranty or limiting liability differently from the
- terms of sections 15 and 16 of this License; or
-
- b) Requiring preservation of specified reasonable legal notices or
- author attributions in that material or in the Appropriate Legal
- Notices displayed by works containing it; or
-
- c) Prohibiting misrepresentation of the origin of that material, or
- requiring that modified versions of such material be marked in
- reasonable ways as different from the original version; or
-
- d) Limiting the use for publicity purposes of names of licensors or
- authors of the material; or
-
- e) Declining to grant rights under trademark law for use of some
- trade names, trademarks, or service marks; or
-
- f) Requiring indemnification of licensors and authors of that
- material by anyone who conveys the material (or modified versions of
- it) with contractual assumptions of liability to the recipient, for
- any liability that these contractual assumptions directly impose on
- those licensors and authors.
-
- All other non-permissive additional terms are considered "further
-restrictions" within the meaning of section 10. If the Program as you
-received it, or any part of it, contains a notice stating that it is
-governed by this License along with a term that is a further
-restriction, you may remove that term. If a license document contains
-a further restriction but permits relicensing or conveying under this
-License, you may add to a covered work material governed by the terms
-of that license document, provided that the further restriction does
-not survive such relicensing or conveying.
-
- If you add terms to a covered work in accord with this section, you
-must place, in the relevant source files, a statement of the
-additional terms that apply to those files, or a notice indicating
-where to find the applicable terms.
-
- Additional terms, permissive or non-permissive, may be stated in the
-form of a separately written license, or stated as exceptions;
-the above requirements apply either way.
-
- 8. Termination.
-
- You may not propagate or modify a covered work except as expressly
-provided under this License. Any attempt otherwise to propagate or
-modify it is void, and will automatically terminate your rights under
-this License (including any patent licenses granted under the third
-paragraph of section 11).
-
- However, if you cease all violation of this License, then your
-license from a particular copyright holder is reinstated (a)
-provisionally, unless and until the copyright holder explicitly and
-finally terminates your license, and (b) permanently, if the copyright
-holder fails to notify you of the violation by some reasonable means
-prior to 60 days after the cessation.
-
- Moreover, your license from a particular copyright holder is
-reinstated permanently if the copyright holder notifies you of the
-violation by some reasonable means, this is the first time you have
-received notice of violation of this License (for any work) from that
-copyright holder, and you cure the violation prior to 30 days after
-your receipt of the notice.
-
- Termination of your rights under this section does not terminate the
-licenses of parties who have received copies or rights from you under
-this License. If your rights have been terminated and not permanently
-reinstated, you do not qualify to receive new licenses for the same
-material under section 10.
-
- 9. Acceptance Not Required for Having Copies.
-
- You are not required to accept this License in order to receive or
-run a copy of the Program. Ancillary propagation of a covered work
-occurring solely as a consequence of using peer-to-peer transmission
-to receive a copy likewise does not require acceptance. However,
-nothing other than this License grants you permission to propagate or
-modify any covered work. These actions infringe copyright if you do
-not accept this License. Therefore, by modifying or propagating a
-covered work, you indicate your acceptance of this License to do so.
-
- 10. Automatic Licensing of Downstream Recipients.
-
- Each time you convey a covered work, the recipient automatically
-receives a license from the original licensors, to run, modify and
-propagate that work, subject to this License. You are not responsible
-for enforcing compliance by third parties with this License.
-
- An "entity transaction" is a transaction transferring control of an
-organization, or substantially all assets of one, or subdividing an
-organization, or merging organizations. If propagation of a covered
-work results from an entity transaction, each party to that
-transaction who receives a copy of the work also receives whatever
-licenses to the work the party's predecessor in interest had or could
-give under the previous paragraph, plus a right to possession of the
-Corresponding Source of the work from the predecessor in interest, if
-the predecessor has it or can get it with reasonable efforts.
-
- You may not impose any further restrictions on the exercise of the
-rights granted or affirmed under this License. For example, you may
-not impose a license fee, royalty, or other charge for exercise of
-rights granted under this License, and you may not initiate litigation
-(including a cross-claim or counterclaim in a lawsuit) alleging that
-any patent claim is infringed by making, using, selling, offering for
-sale, or importing the Program or any portion of it.
-
- 11. Patents.
-
- A "contributor" is a copyright holder who authorizes use under this
-License of the Program or a work on which the Program is based. The
-work thus licensed is called the contributor's "contributor version".
-
- A contributor's "essential patent claims" are all patent claims
-owned or controlled by the contributor, whether already acquired or
-hereafter acquired, that would be infringed by some manner, permitted
-by this License, of making, using, or selling its contributor version,
-but do not include claims that would be infringed only as a
-consequence of further modification of the contributor version. For
-purposes of this definition, "control" includes the right to grant
-patent sublicenses in a manner consistent with the requirements of
-this License.
-
- Each contributor grants you a non-exclusive, worldwide, royalty-free
-patent license under the contributor's essential patent claims, to
-make, use, sell, offer for sale, import and otherwise run, modify and
-propagate the contents of its contributor version.
-
- In the following three paragraphs, a "patent license" is any express
-agreement or commitment, however denominated, not to enforce a patent
-(such as an express permission to practice a patent or covenant not to
-sue for patent infringement). To "grant" such a patent license to a
-party means to make such an agreement or commitment not to enforce a
-patent against the party.
-
- If you convey a covered work, knowingly relying on a patent license,
-and the Corresponding Source of the work is not available for anyone
-to copy, free of charge and under the terms of this License, through a
-publicly available network server or other readily accessible means,
-then you must either (1) cause the Corresponding Source to be so
-available, or (2) arrange to deprive yourself of the benefit of the
-patent license for this particular work, or (3) arrange, in a manner
-consistent with the requirements of this License, to extend the patent
-license to downstream recipients. "Knowingly relying" means you have
-actual knowledge that, but for the patent license, your conveying the
-covered work in a country, or your recipient's use of the covered work
-in a country, would infringe one or more identifiable patents in that
-country that you have reason to believe are valid.
-
- If, pursuant to or in connection with a single transaction or
-arrangement, you convey, or propagate by procuring conveyance of, a
-covered work, and grant a patent license to some of the parties
-receiving the covered work authorizing them to use, propagate, modify
-or convey a specific copy of the covered work, then the patent license
-you grant is automatically extended to all recipients of the covered
-work and works based on it.
-
- A patent license is "discriminatory" if it does not include within
-the scope of its coverage, prohibits the exercise of, or is
-conditioned on the non-exercise of one or more of the rights that are
-specifically granted under this License. You may not convey a covered
-work if you are a party to an arrangement with a third party that is
-in the business of distributing software, under which you make payment
-to the third party based on the extent of your activity of conveying
-the work, and under which the third party grants, to any of the
-parties who would receive the covered work from you, a discriminatory
-patent license (a) in connection with copies of the covered work
-conveyed by you (or copies made from those copies), or (b) primarily
-for and in connection with specific products or compilations that
-contain the covered work, unless you entered into that arrangement,
-or that patent license was granted, prior to 28 March 2007.
-
- Nothing in this License shall be construed as excluding or limiting
-any implied license or other defenses to infringement that may
-otherwise be available to you under applicable patent law.
-
- 12. No Surrender of Others' Freedom.
-
- If conditions are imposed on you (whether by court order, agreement or
-otherwise) that contradict the conditions of this License, they do not
-excuse you from the conditions of this License. If you cannot convey a
-covered work so as to satisfy simultaneously your obligations under this
-License and any other pertinent obligations, then as a consequence you may
-not convey it at all. For example, if you agree to terms that obligate you
-to collect a royalty for further conveying from those to whom you convey
-the Program, the only way you could satisfy both those terms and this
-License would be to refrain entirely from conveying the Program.
-
- 13. Use with the GNU Affero General Public License.
-
- Notwithstanding any other provision of this License, you have
-permission to link or combine any covered work with a work licensed
-under version 3 of the GNU Affero General Public License into a single
-combined work, and to convey the resulting work. The terms of this
-License will continue to apply to the part which is the covered work,
-but the special requirements of the GNU Affero General Public License,
-section 13, concerning interaction through a network will apply to the
-combination as such.
-
- 14. Revised Versions of this License.
-
- The Free Software Foundation may publish revised and/or new versions of
-the GNU General Public License from time to time. Such new versions will
-be similar in spirit to the present version, but may differ in detail to
-address new problems or concerns.
-
- Each version is given a distinguishing version number. If the
-Program specifies that a certain numbered version of the GNU General
-Public License "or any later version" applies to it, you have the
-option of following the terms and conditions either of that numbered
-version or of any later version published by the Free Software
-Foundation. If the Program does not specify a version number of the
-GNU General Public License, you may choose any version ever published
-by the Free Software Foundation.
-
- If the Program specifies that a proxy can decide which future
-versions of the GNU General Public License can be used, that proxy's
-public statement of acceptance of a version permanently authorizes you
-to choose that version for the Program.
-
- Later license versions may give you additional or different
-permissions. However, no additional obligations are imposed on any
-author or copyright holder as a result of your choosing to follow a
-later version.
-
- 15. Disclaimer of Warranty.
-
- THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
-APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
-HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
-OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
-THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
-PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
-IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
-ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
-
- 16. Limitation of Liability.
-
- IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
-WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
-THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
-GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
-USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
-DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
-PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
-EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
-SUCH DAMAGES.
-
- 17. Interpretation of Sections 15 and 16.
-
- If the disclaimer of warranty and limitation of liability provided
-above cannot be given local legal effect according to their terms,
-reviewing courts shall apply local law that most closely approximates
-an absolute waiver of all civil liability in connection with the
-Program, unless a warranty or assumption of liability accompanies a
-copy of the Program in return for a fee.
-
- END OF TERMS AND CONDITIONS
-
- How to Apply These Terms to Your New Programs
-
- If you develop a new program, and you want it to be of the greatest
-possible use to the public, the best way to achieve this is to make it
-free software which everyone can redistribute and change under these terms.
-
- To do so, attach the following notices to the program. It is safest
-to attach them to the start of each source file to most effectively
-state the exclusion of warranty; and each file should have at least
-the "copyright" line and a pointer to where the full notice is found.
-
- <one line to give the program's name and a brief idea of what it does.>
- Copyright (C) <year>  <name of author>
-
- This program is free software: you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation, either version 3 of the License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>.
-
-Also add information on how to contact you by electronic and paper mail.
-
- If the program does terminal interaction, make it output a short
-notice like this when it starts in an interactive mode:
-
- <program>  Copyright (C) <year>  <name of author>
- This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
- This is free software, and you are welcome to redistribute it
- under certain conditions; type `show c' for details.
-
-The hypothetical commands `show w' and `show c' should show the appropriate
-parts of the General Public License. Of course, your program's commands
-might be different; for a GUI interface, you would use an "about box".
-
- You should also get your employer (if you work as a programmer) or school,
-if any, to sign a "copyright disclaimer" for the program, if necessary.
-For more information on this, and how to apply and follow the GNU GPL, see
-<https://www.gnu.org/licenses/>.
-
- The GNU General Public License does not permit incorporating your program
-into proprietary programs. If your program is a subroutine library, you
-may consider it more useful to permit linking proprietary applications with
-the library. If this is what you want to do, use the GNU Lesser General
-Public License instead of this License. But first, please read
-<https://www.gnu.org/licenses/why-not-lgpl.html>.
\ No newline at end of file
diff --git a/backend/__init__.py b/backend/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/backend/app/__init__.py b/backend/app/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/backend/app/arr/__init__.py b/backend/app/arr/__init__.py
deleted file mode 100644
index 274154c..0000000
--- a/backend/app/arr/__init__.py
+++ /dev/null
@@ -1,188 +0,0 @@
-from flask import Blueprint, request, jsonify
-from flask_cors import cross_origin
-import logging
-from .status.ping import ping_service
-from .manager import (save_arr_config, get_all_arr_configs, get_arr_config,
- update_arr_config, delete_arr_config)
-
-logger = logging.getLogger(__name__)
-logger.setLevel(logging.ERROR)
-
-bp = Blueprint('arr', __name__)
-
-
-@bp.route('/ping', methods=['POST', 'OPTIONS'])
-@cross_origin()
-def ping():
- if request.method == 'OPTIONS':
- return jsonify({}), 200
- data = request.get_json()
- url = data.get('url')
- api_key = data.get('apiKey')
- arr_type = data.get('type')
-
- if not url or not api_key or not arr_type:
- return jsonify({
- 'success': False,
- 'error': 'URL, API key, and type are required'
- }), 400
-
- logger.error(f"Attempting to ping URL: {url} of type: {arr_type}")
- success, message = ping_service(url, api_key, arr_type)
- logger.error(f"Ping result - Success: {success}, Message: {message}")
-
- return jsonify({
- 'success': success,
- 'message': message
- }), 200 if success else 400
-
-
-@bp.route('/config', methods=['POST', 'OPTIONS'])
-@cross_origin()
-def add_config():
- if request.method == 'OPTIONS':
- return jsonify({}), 200
-
- try:
- config = request.json
-
- # Validate sync_interval if schedule method
- if config.get('sync_method') == 'schedule':
- sync_interval = config.get('sync_interval', 0)
- if sync_interval < 60 or sync_interval > 43200:
- return jsonify({
- 'success': False,
- 'error': 'Sync interval must be between 60 minutes (1 hour) and 43200 minutes (1 month)'
- }), 400
-
- result = save_arr_config(config)
-
- # Handle the conflict case first
- if not result['success'] and result.get('status_code') == 409:
- return jsonify({'success': False, 'error': result['error']}), 409
-
- # Handle other failure cases
- if not result['success']:
- return jsonify(result), 400
-
- return jsonify(result), 200
- except Exception as e:
- logger.error(f"Error saving arr config: {str(e)}")
- return jsonify({'success': False, 'error': str(e)}), 400
-
-
-@bp.route('/config', methods=['GET', 'OPTIONS'])
-@cross_origin()
-def get_configs():
- if request.method == 'OPTIONS':
- return jsonify({}), 200
-
- try:
- configs = get_all_arr_configs()
- logger.debug(f"Retrieved {len(configs)} arr configs")
- return jsonify(configs), 200
- except Exception as e:
- logger.error(f"Error getting arr configs: {str(e)}")
- return jsonify({'success': False, 'error': str(e)}), 400
-
-
-@bp.route('/config/<int:id>', methods=['GET', 'PUT', 'DELETE', 'OPTIONS'])
-@cross_origin()
-def handle_config(id):
- if request.method == 'OPTIONS':
- return jsonify({}), 200
-
- try:
- if request.method == 'GET':
- config = get_arr_config(id)
- if config:
- logger.debug(f"Retrieved arr config: {id}")
- return jsonify({'success': True, 'data': config}), 200
- logger.debug(f"Arr config not found: {id}")
- return jsonify({
- 'success': False,
- 'error': 'Config not found'
- }), 404
-
- elif request.method == 'PUT':
- config = request.json
-
- # Validate sync_interval if schedule method
- if config.get('sync_method') == 'schedule':
- sync_interval = config.get('sync_interval', 0)
- if sync_interval < 60 or sync_interval > 43200:
- return jsonify({
- 'success': False,
- 'error': 'Sync interval must be between 60 minutes (1 hour) and 43200 minutes (1 month)'
- }), 400
-
- result = update_arr_config(id, config)
-
- # Handle the conflict case first
- if not result['success'] and result.get('status_code') == 409:
- return jsonify({
- 'success': False,
- 'error': result['error']
- }), 409
-
- # Handle other failure cases
- if not result['success']:
- logger.debug(f"Arr config not found for update: {id}")
- return jsonify({
- 'success': False,
- 'error': 'Config not found'
- }), 404
-
- logger.debug(f"Updated arr config: {id}")
- return jsonify({'success': True}), 200
-
- elif request.method == 'DELETE':
- success = delete_arr_config(id)
- if success:
- logger.debug(f"Deleted arr config: {id}")
- return jsonify({'success': True}), 200
- logger.debug(f"Arr config not found for deletion: {id}")
- return jsonify({
- 'success': False,
- 'error': 'Config not found'
- }), 404
-
- except Exception as e:
- logger.error(f"Error handling arr config {id}: {str(e)}")
- return jsonify({'success': False, 'error': str(e)}), 400
-
-
-@bp.route('/config/<int:id>/sync', methods=['POST', 'OPTIONS'])
-@cross_origin()
-def trigger_sync(id):
- if request.method == 'OPTIONS':
- return jsonify({}), 200
-
- try:
- # Get the config first
- config_result = get_arr_config(id)
- if not config_result.get('success'):
- logger.error(f"Config not found for sync: {id}")
- return jsonify({
- 'success': False,
- 'error': 'Configuration not found'
- }), 404
-
- config_data = config_result.get('data')
- if not config_data:
- logger.error(f"Invalid config data for sync: {id}")
- return jsonify({
- 'success': False,
- 'error': 'Invalid configuration data'
- }), 400
-
- # Run the import
- from ..importer import handle_pull_import
- handle_pull_import(id)
-
- logger.debug(f"Manual sync triggered for arr config: {id}")
- return jsonify({'success': True}), 200
-
- except Exception as e:
- logger.error(f"Error triggering sync for arr config {id}: {str(e)}")
- return jsonify({'success': False, 'error': str(e)}), 400
diff --git a/backend/app/arr/manager.py b/backend/app/arr/manager.py
deleted file mode 100644
index 7334bef..0000000
--- a/backend/app/arr/manager.py
+++ /dev/null
@@ -1,428 +0,0 @@
-# arr/manager.py
-
-from ..db import get_db
-import json
-import logging
-
-# Import our task-utils that handle DB insertion for scheduled tasks
-from .task_utils import (create_import_task_for_arr_config,
- update_import_task_for_arr_config,
- delete_import_task_for_arr_config)
-
-from ..task.tasks import TaskScheduler
-
-logger = logging.getLogger(__name__)
-logger.setLevel(logging.DEBUG)
-
-
-def save_arr_config(config):
- """
- Create a new arr_config row, then create a corresponding scheduled task (if sync_method != manual).
- Store the newly created task's ID in arr_config.import_task_id.
- """
- with get_db() as conn:
- cursor = conn.cursor()
- try:
- # Check if name already exists
- existing = cursor.execute(
- 'SELECT id FROM arr_config WHERE name = ?',
- (config['name'], )).fetchone()
-
- if existing:
- logger.warning(
- f"[save_arr_config] Attempted to create duplicate config name: {config['name']}"
- )
- return {
- 'success': False,
- 'error': 'Configuration with this name already exists',
- 'status_code': 409
- }
-
- # 1) Insert the arr_config row
- logger.debug(
- f"[save_arr_config] Attempting to create new arr_config with name={config['name']} sync_method={config.get('sync_method')}"
- )
-
- cursor.execute(
- '''
- INSERT INTO arr_config (
- name, type, tags, arr_server, api_key,
- data_to_sync, last_sync_time, sync_percentage,
- sync_method, sync_interval, import_as_unique,
- import_task_id
- )
- VALUES (?, ?, ?, ?, ?, ?, NULL, 0, ?, ?, ?, NULL)
- ''', (
- config['name'],
- config['type'],
- json.dumps(config.get('tags', [])),
- config['arrServer'],
- config['apiKey'],
- json.dumps(config.get('data_to_sync', {})),
- config.get('sync_method', 'manual'),
- config.get('sync_interval', 0),
- config.get('import_as_unique', False),
- ))
- conn.commit()
-
- new_config_id = cursor.lastrowid
- logger.info(
- f"[save_arr_config] Created new arr_config row #{new_config_id} for '{config['name']}'"
- )
-
- # 2) Create a scheduled task row if needed
- sync_method = config.get('sync_method', 'manual')
- sync_interval = config.get('sync_interval', 0)
- task_id = create_import_task_for_arr_config(
- config_id=new_config_id,
- config_name=config['name'],
- sync_method=sync_method,
- sync_interval=sync_interval)
-
- # 3) Update arr_config.import_task_id if a task was created
- if task_id:
- logger.debug(
- f"[save_arr_config] Updating arr_config #{new_config_id} with import_task_id={task_id}"
- )
- cursor.execute(
- 'UPDATE arr_config SET import_task_id = ? WHERE id = ?',
- (task_id, new_config_id))
- conn.commit()
-
- scheduler = TaskScheduler.get_instance()
- if scheduler:
- logger.debug("[save_arr_config] Reloading tasks from DB...")
- scheduler.load_tasks_from_db()
-
- return {'success': True, 'id': new_config_id}
-
- except Exception as e:
- logger.error(
- f"[save_arr_config] Error saving arr config: {str(e)}")
- return {'success': False, 'error': str(e)}
-
-
-def update_arr_config(id, config):
- """
- Update an existing arr_config row, then create/update/remove the corresponding scheduled task as needed.
- """
- with get_db() as conn:
- cursor = conn.cursor()
- try:
- # Check if name already exists (excluding current config)
- existing = cursor.execute(
- 'SELECT id FROM arr_config WHERE name = ? AND id != ?',
- (config['name'], id)).fetchone()
-
- if existing:
- logger.warning(
- f"[update_arr_config] Attempted to update config #{id} to duplicate name: {config['name']}"
- )
- return {
- 'success': False,
- 'error': 'Configuration with this name already exists',
- 'status_code': 409
- }
-
- # 1) Grab existing row so we know the existing import_task_id
- existing_row = cursor.execute(
- 'SELECT * FROM arr_config WHERE id = ?', (id, )).fetchone()
- if not existing_row:
- logger.debug(
- f"[update_arr_config] No arr_config row found with id={id}"
- )
- return {'success': False, 'error': 'Configuration not found'}
-
- existing_task_id = existing_row['import_task_id']
-
- # 2) Update the arr_config row itself
- logger.debug(
- f"[update_arr_config] Updating arr_config #{id} name={config['name']} sync_method={config.get('sync_method')}"
- )
-
- cursor.execute(
- '''
- UPDATE arr_config
- SET name = ?,
- type = ?,
- tags = ?,
- arr_server = ?,
- api_key = ?,
- data_to_sync = ?,
- sync_method = ?,
- sync_interval = ?,
- import_as_unique = ?
- WHERE id = ?
- ''',
- (config['name'], config['type'],
- json.dumps(config.get('tags', [])), config['arrServer'],
- config['apiKey'], json.dumps(config.get(
- 'data_to_sync', {})), config.get('sync_method', 'manual'),
- config.get('sync_interval',
- 0), config.get('import_as_unique', False), id))
- conn.commit()
- if cursor.rowcount == 0:
- logger.debug(
- f"[update_arr_config] arr_config #{id} not found for update"
- )
- return {'success': False, 'error': 'Configuration not found'}
-
- logger.info(f"[update_arr_config] Updated arr_config row #{id}")
-
- # 3) Create/Update/Remove the scheduled task row
- new_task_id = update_import_task_for_arr_config(
- config_id=id,
- config_name=config['name'],
- sync_method=config.get('sync_method', 'manual'),
- sync_interval=config.get('sync_interval', 0),
- existing_task_id=existing_task_id)
-
- # 4) Store new_task_id in arr_config.import_task_id
- logger.debug(
- f"[update_arr_config] Setting arr_config #{id} import_task_id to {new_task_id}"
- )
- cursor.execute(
- 'UPDATE arr_config SET import_task_id = ? WHERE id = ?',
- (new_task_id, id))
- conn.commit()
-
- scheduler = TaskScheduler.get_instance()
- if scheduler:
- logger.debug("[update_arr_config] Reloading tasks from DB...")
- scheduler.load_tasks_from_db()
-
- return {'success': True}
-
- except Exception as e:
- logger.error(
- f"[update_arr_config] Error updating arr config: {str(e)}")
- return {'success': False, 'error': str(e)}
-
-
-def delete_arr_config(id):
- """
- Delete an arr_config row, plus remove its scheduled_task if any.
- """
- with get_db() as conn:
- cursor = conn.cursor()
- try:
- # 1) Fetch the row so we know which task to remove
- existing_row = cursor.execute(
- 'SELECT * FROM arr_config WHERE id = ?', (id, )).fetchone()
- if not existing_row:
- logger.debug(
- f"[delete_arr_config] No arr_config row found with id={id}"
- )
- return {'success': False, 'error': 'Configuration not found'}
-
- existing_task_id = existing_row['import_task_id']
-
- # 2) Delete the arr_config
- logger.debug(f"[delete_arr_config] Removing arr_config #{id}")
- cursor.execute('DELETE FROM arr_config WHERE id = ?', (id, ))
- conn.commit()
- if cursor.rowcount == 0:
- logger.debug(
- f"[delete_arr_config] arr_config #{id} not found for deletion"
- )
- return {'success': False, 'error': 'Configuration not found'}
-
- logger.info(f"[delete_arr_config] Deleted arr_config #{id}")
-
- # 3) If there's a scheduled task, remove it
- if existing_task_id:
- delete_import_task_for_arr_config(existing_task_id)
-
- scheduler = TaskScheduler.get_instance()
- if scheduler:
- logger.debug("[delete_arr_config] Reloading tasks from DB...")
- scheduler.load_tasks_from_db()
-
- return {'success': True}
-
- except Exception as e:
- logger.error(
- f"[delete_arr_config] Error deleting arr config: {str(e)}")
- return {'success': False, 'error': str(e)}
-
-
-def get_all_arr_configs():
- with get_db() as conn:
- cursor = conn.execute('SELECT * FROM arr_config')
- rows = cursor.fetchall()
- try:
- configs = []
- for row in rows:
- configs.append({
- 'id':
- row['id'],
- 'name':
- row['name'],
- 'type':
- row['type'],
- 'tags':
- json.loads(row['tags']) if row['tags'] else [],
- 'arrServer':
- row['arr_server'],
- 'apiKey':
- row['api_key'],
- 'data_to_sync': (json.loads(row['data_to_sync'])
- if row['data_to_sync'] else {}),
- 'last_sync_time':
- row['last_sync_time'],
- 'sync_percentage':
- row['sync_percentage'],
- 'sync_method':
- row['sync_method'],
- 'sync_interval':
- row['sync_interval'],
- 'import_as_unique':
- bool(row['import_as_unique']),
- 'import_task_id':
- row['import_task_id']
- })
- return {'success': True, 'data': configs}
- except Exception as e:
- logger.error(f"[get_all_arr_configs] Error: {str(e)}")
- return {'success': False, 'error': str(e)}
-
-
-def get_arr_config(id):
- with get_db() as conn:
- cursor = conn.execute('SELECT * FROM arr_config WHERE id = ?', (id, ))
- row = cursor.fetchone()
- try:
- if row:
- return {
- 'success': True,
- 'data': {
- 'id':
- row['id'],
- 'name':
- row['name'],
- 'type':
- row['type'],
- 'tags':
- json.loads(row['tags']) if row['tags'] else [],
- 'arrServer':
- row['arr_server'],
- 'apiKey':
- row['api_key'],
- 'data_to_sync': (json.loads(row['data_to_sync'])
- if row['data_to_sync'] else {}),
- 'last_sync_time':
- row['last_sync_time'],
- 'sync_percentage':
- row['sync_percentage'],
-
- # Keep these as-is
- 'sync_method':
- row['sync_method'],
- 'sync_interval':
- row['sync_interval'],
- 'import_as_unique':
- bool(row['import_as_unique']),
- 'import_task_id':
- row['import_task_id']
- }
- }
- logger.debug(
- f"[get_arr_config] No arr_config row found with id={id}")
- return {'success': False, 'error': 'Configuration not found'}
- except Exception as e:
- logger.error(f"[get_arr_config] Error: {str(e)}")
- return {'success': False, 'error': str(e)}
-
-
-def get_scheduled_configs():
- """
- Return all arr_configs where sync_method='schedule'.
- Potentially used if you want to see scheduled ones explicitly.
- """
- with get_db() as conn:
- cursor = conn.execute('SELECT * FROM arr_config WHERE sync_method = ?',
- ('schedule', ))
- rows = cursor.fetchall()
- try:
- configs = []
- for row in rows:
- configs.append({
- 'id': row['id'],
- 'name': row['name'],
- 'sync_interval': row['sync_interval'],
- 'import_task_id': row['import_task_id']
- })
- return {'success': True, 'data': configs}
- except Exception as e:
- logger.error(f"[get_scheduled_configs] Error: {str(e)}")
- return {'success': False, 'error': str(e)}
-
-
-def get_pull_configs():
- with get_db() as conn:
- rows = conn.execute(
- 'SELECT * FROM arr_config WHERE sync_method = "pull"').fetchall()
-
- results = []
- for row in rows:
- results.append({
-            'id': row['id'],
-            'name': row['name'],
-            'type': row['type'],
-            'tags': json.loads(row['tags']) if row['tags'] else [],
-            'arrServer': row['arr_server'],
-            'apiKey': row['api_key'],
-            'data_to_sync': (json.loads(row['data_to_sync'])
-                             if row['data_to_sync'] else {}),
-            'last_sync_time': row['last_sync_time'],
-            'sync_percentage': row['sync_percentage'],
-            'sync_method': row['sync_method'],
-            'sync_interval': row['sync_interval'],
-            'import_as_unique': bool(row['import_as_unique']),
-            'import_task_id': row['import_task_id']
- })
- return results
-
-
-def check_active_sync_configs():
- """
- Check if there are any ARR configurations with non-manual sync methods.
- Returns (has_active_configs, details) tuple.
- """
- with get_db() as conn:
- cursor = conn.execute('''
- SELECT id, name, sync_method, data_to_sync
- FROM arr_config
- WHERE sync_method != 'manual'
- ''')
- active_configs = cursor.fetchall()
-
- if not active_configs:
- return False, None
-
- details = []
- for config in active_configs:
- data_to_sync = json.loads(
- config['data_to_sync'] if config['data_to_sync'] else '{}')
- if data_to_sync.get('profiles') or data_to_sync.get(
- 'customFormats'):
- details.append({
- 'id': config['id'],
- 'name': config['name'],
- 'sync_method': config['sync_method'],
- 'data': data_to_sync
- })
-
- return bool(details), details
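
The query helpers above return plain dictionaries instead of raising, so callers branch on the 'success' flag; get_pull_configs is the exception and returns a bare list. A minimal consumer sketch, assuming the functions are imported from this module (the endpoint wrappers and status codes are illustrative, not taken from the repository):

from flask import jsonify

def list_arr_configs_endpoint():
    # Envelope style: {'success': True, 'data': [...]} or {'success': False, 'error': str}
    result = get_all_arr_configs()
    if not result['success']:
        return jsonify({'error': result['error']}), 500
    return jsonify(result['data']), 200

def get_arr_config_endpoint(config_id):
    result = get_arr_config(config_id)
    if not result['success']:
        # Covers both 'Configuration not found' and database errors
        return jsonify({'error': result['error']}), 404
    return jsonify(result['data']), 200
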
diff --git a/backend/app/arr/status/ping.py b/backend/app/arr/status/ping.py
deleted file mode 100644
index a2d1495..0000000
--- a/backend/app/arr/status/ping.py
+++ /dev/null
@@ -1,78 +0,0 @@
-# app/arr/status/ping.py
-import socket
-import requests
-import logging
-
-logger = logging.getLogger(__name__)
-
-REQUIRED_VERSIONS = {'radarr': '5.10.4', 'sonarr': '4.0.10'}
-
-
-def check_version_compatibility(installed_version, required_version):
- """
- Check if installed version meets minimum required version for Radarr/Sonarr.
- """
- installed_parts = [int(x) for x in installed_version.split('.')]
- required_parts = [int(x) for x in required_version.split('.')]
-
- # Only compare the parts we care about (first 3 numbers for Radarr/Sonarr)
- for installed, required in zip(installed_parts[:3], required_parts[:3]):
- if installed < required:
- return False
- if installed > required:
- return True
- return True
-
-
-def ping_service(url, api_key, arr_type):
- """
- Ping an Arr service and verify its type and version
- """
- try:
- base_url = url.rstrip('/')
- headers = {'X-Api-Key': api_key}
-
- logger.warning(f"Attempting to connect to {base_url} for {arr_type}")
-
- response = requests.get(f"{base_url}/api/v3/system/status",
- headers=headers,
- timeout=10)
-
- logger.warning(f"Response status: {response.status_code}")
- logger.warning(f"Response content: {response.text}")
-
- if response.status_code != 200:
- return False, f"Service returned status code: {response.status_code}"
-
- data = response.json()
- logger.warning(f"Parsed response data: {data}")
-
- # First check app type
- app_name = data.get('appName', '').lower()
- version = data.get('version')
-
- logger.warning(f"Found app: {app_name} version: {version}")
-
- # Check app type
- if arr_type == 'radarr' and app_name != 'radarr':
- return False, f"Expected Radarr but found {app_name}"
- elif arr_type == 'sonarr' and app_name != 'sonarr':
- return False, f"Expected Sonarr but found {app_name}"
-
- # Check version
- if not version:
- return False, "Could not determine application version"
-
- required_version = REQUIRED_VERSIONS.get(arr_type)
- if not check_version_compatibility(version, required_version):
- return False, f"{app_name.title()} version {version} is not supported. Minimum required version is {required_version}"
-
- return True, "Connection successful and application type and version verified"
-
- except requests.exceptions.Timeout:
- return False, "Connection timed out"
- except requests.exceptions.ConnectionError:
- return False, "Failed to connect to service"
- except Exception as e:
- logger.error(f"Error pinging service: {str(e)}")
- return False, f"Error: {str(e)}"
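
To make the three-part version comparison concrete, and to show how ping_service reports problems as a (False, reason) tuple rather than raising, a short sketch; the URL, API key, and version strings are placeholders:

# Version gate examples (values invented for illustration)
assert check_version_compatibility('5.11.0', '5.10.4') is True   # newer minor passes
assert check_version_compatibility('5.10.4', '5.10.4') is True   # exact minimum passes
assert check_version_compatibility('4.0.9', '4.0.10') is False   # patch too old

ok, message = ping_service('http://localhost:7878', 'placeholder-api-key', 'radarr')
if not ok:
    print(f"Radarr check failed: {message}")
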
diff --git a/backend/app/arr/task_utils.py b/backend/app/arr/task_utils.py
deleted file mode 100644
index f27545e..0000000
--- a/backend/app/arr/task_utils.py
+++ /dev/null
@@ -1,140 +0,0 @@
-# arr/task_utils.py
-
-import logging
-from ..db import get_db
-
-logger = logging.getLogger(__name__)
-
-
-def create_import_task_for_arr_config(config_id, config_name, sync_method,
- sync_interval):
- """
- Create a scheduled task for the given ARR config (if needed).
- Returns the newly-created task id or None.
- """
- if sync_method == 'manual':
- logger.debug(
- f"[ARR Tasks] No import task created for {config_name} because sync_method=manual"
- )
- return None
-
- with get_db() as conn:
- cursor = conn.cursor()
-
- # pull: not scheduled; on-demand during git pull
- if sync_method == 'pull':
- logger.debug(
- f"[ARR Tasks] No scheduled task created for {config_name} because sync_method=pull (runs on git pull)"
- )
- return None
-
- # schedule: create an interval-based task
- task_type = 'ImportSchedule'
- interval_minutes = sync_interval or 0
-
- # Insert into scheduled_tasks table
- cursor.execute(
- '''
- INSERT INTO scheduled_tasks (name, type, interval_minutes, status)
- VALUES (?, ?, ?, ?)
- ''', (f"Import for ARR #{config_id} - {config_name}", task_type,
- interval_minutes, 'pending'))
- new_task_id = cursor.lastrowid
- conn.commit()
-
- logger.debug(
- f"[ARR Tasks] Created new {task_type} task with ID {new_task_id} for ARR config {config_id}"
- )
- return new_task_id
-
-
-def update_import_task_for_arr_config(config_id, config_name, sync_method,
- sync_interval, existing_task_id):
- """
- Update the existing scheduled task for the given ARR config (if needed).
- If the sync_method changes from 'pull' or 'manual' to 'schedule', we create or update.
- If it changes from 'schedule' to 'pull' (or 'manual'), we delete the old scheduled row.
- """
-
- with get_db() as conn:
- cursor = conn.cursor()
-
- # If user changed to manual or pull => remove the old row (if any)
- if sync_method in ['manual', 'pull']:
- if existing_task_id:
- logger.debug(
- f"[update_import_task_for_arr_config] Removing old task {existing_task_id} because sync_method={sync_method}"
- )
- cursor.execute('DELETE FROM scheduled_tasks WHERE id = ?',
- (existing_task_id, ))
- deleted_count = cursor.rowcount
- conn.commit()
- if deleted_count:
- logger.info(
- f"[update_import_task_for_arr_config] Deleted old task {existing_task_id} for ARR #{config_id}"
- )
- # For 'pull' or 'manual', we do NOT create a new row in `scheduled_tasks`
- return None
-
- # Otherwise, sync_method='schedule' => create or update
- # (We keep the same logic as before if user wants a scheduled import)
- task_type = 'ImportSchedule'
- interval_minutes = sync_interval or 0
-
- # If there's NO existing task, create a new one
- if not existing_task_id:
- logger.debug(
- f"[update_import_task_for_arr_config] No existing task for ARR #{config_id}; creating new schedule."
- )
- return create_import_task_for_arr_config(config_id, config_name,
- sync_method,
- sync_interval)
-
- # If we DO have an existing scheduled task => update it
- logger.debug(
- f"[update_import_task_for_arr_config] Updating existing task {existing_task_id} for ARR #{config_id}, interval={interval_minutes}"
- )
- cursor.execute(
- '''
- UPDATE scheduled_tasks
- SET name = ?, type = ?, interval_minutes = ?
- WHERE id = ?
- ''', (
- f"Import for ARR #{config_id} - {config_name}",
- task_type,
- interval_minutes,
- existing_task_id,
- ))
- updated_count = cursor.rowcount
- conn.commit()
-
- if updated_count == 0:
- logger.warning(
- f"[update_import_task_for_arr_config] Could not find scheduled task {existing_task_id} for ARR #{config_id}, creating new."
- )
- return create_import_task_for_arr_config(config_id, config_name,
- sync_method,
- sync_interval)
-
- logger.debug(
- f"[update_import_task_for_arr_config] Successfully updated scheduled task {existing_task_id} for ARR #{config_id}"
- )
- return existing_task_id
-
-
-def delete_import_task_for_arr_config(task_id):
- """
- Delete the import task if it exists.
- """
- if not task_id:
- return
- with get_db() as conn:
- cursor = conn.cursor()
- cursor.execute('DELETE FROM scheduled_tasks WHERE id = ?', (task_id, ))
- conn.commit()
- if cursor.rowcount > 0:
- logger.debug(f"[ARR Tasks] Deleted import task with ID {task_id}")
- else:
- logger.debug(
- f"[ARR Tasks] No import task found to delete with ID {task_id}"
- )
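
Taken together, the three helpers cover the whole lifecycle of a config's import task: only sync_method='schedule' creates a scheduled_tasks row, switching to 'pull' or 'manual' removes it, and deletion tolerates a missing id. A sketch with invented config values:

# Create: 'schedule' is the only method that inserts a scheduled_tasks row.
task_id = create_import_task_for_arr_config(
    config_id=3, config_name='Radarr 4K', sync_method='schedule', sync_interval=60)

# Update: switching to on-demand pulls deletes the old row and returns None.
task_id = update_import_task_for_arr_config(
    config_id=3, config_name='Radarr 4K', sync_method='pull',
    sync_interval=None, existing_task_id=task_id)

# Delete: a no-op when task_id is None, otherwise removes the row.
delete_import_task_for_arr_config(task_id)
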
diff --git a/backend/app/auth/__init__.py b/backend/app/auth/__init__.py
deleted file mode 100644
index 8b07fda..0000000
--- a/backend/app/auth/__init__.py
+++ /dev/null
@@ -1,118 +0,0 @@
-# backend/app/auth/__init__.py
-
-from flask import Blueprint, jsonify, request, session
-from werkzeug.security import generate_password_hash, check_password_hash
-import secrets
-import logging
-from ..db import get_db
-
-logger = logging.getLogger(__name__)
-bp = Blueprint('auth', __name__)
-
-
-@bp.route('/setup', methods=['GET', 'POST'])
-def setup():
- db = get_db()
-
- # Handle GET request to check if setup is needed
- if request.method == 'GET':
- if db.execute('SELECT 1 FROM auth').fetchone():
- return jsonify({'error': 'Auth already configured'}), 400
- return jsonify({'needs_setup': True}), 200
-
- # Handle POST request for actual setup
- if db.execute('SELECT 1 FROM auth').fetchone():
- logger.warning('Failed setup attempt - auth already configured')
- return jsonify({'error': 'Auth already configured'}), 400
-
- data = request.get_json()
- username = data.get('username', 'admin')
- password = data.get('password')
-
- if not password:
- logger.error('Setup failed - no password provided')
- return jsonify({'error': 'Password is required'}), 400
-
- api_key = secrets.token_urlsafe(32)
- password_hash = generate_password_hash(password)
- session_id = secrets.token_urlsafe(32) # Generate a new session ID
-
- try:
- db.execute(
- 'INSERT INTO auth (username, password_hash, api_key, session_id) VALUES (?, ?, ?, ?)',
- (username, password_hash, api_key, session_id))
- db.commit()
- logger.info('Initial auth setup completed successfully')
-
- # Set up session after successful creation
- session['authenticated'] = True
- session['session_id'] = session_id
- session.permanent = True
-
- return jsonify({
- 'message': 'Auth configured successfully',
- 'username': username,
- 'api_key': api_key,
- 'authenticated': True
- })
- except Exception as e:
- logger.error(f'Setup failed - database error: {str(e)}')
- return jsonify({'error': 'Failed to setup authentication'}), 500
-
-
-@bp.route('/authenticate', methods=['POST'])
-def authenticate():
- db = get_db()
- data = request.get_json()
- username = data.get('username')
- password = data.get('password')
- ip_address = request.remote_addr
-
- # Check recent failed attempts
- recent_attempts = db.execute(
- '''
- SELECT COUNT(*) as count FROM failed_attempts
- WHERE ip_address = ?
- AND attempt_time > datetime('now', '-15 minutes')
- ''', (ip_address, )).fetchone()['count']
-
- if recent_attempts >= 5:
- logger.warning(f'Too many failed attempts from IP: {ip_address}')
- return jsonify({'error':
- 'Too many failed attempts. Try again later.'}), 429
-
- if not username or not password:
- logger.warning('Authentication attempt with missing credentials')
- return jsonify({'error': 'Username and password required'}), 400
-
- user = db.execute('SELECT * FROM auth WHERE username = ?',
- (username, )).fetchone()
-
- if user and check_password_hash(user['password_hash'], password):
- # Generate a new session ID
- new_session_id = secrets.token_urlsafe(32)
- db.execute('UPDATE auth SET session_id = ? WHERE username = ?',
- (new_session_id, username))
- db.commit()
-
- # Set up session
- session['authenticated'] = True
-        session['session_id'] = new_session_id  # Store session ID in the session
- session.permanent = True
-
- # Clear failed attempts on success
- db.execute('DELETE FROM failed_attempts WHERE ip_address = ?',
- (ip_address, ))
- db.commit()
-
- logger.info(f'Successful authentication for user: {username}')
- return jsonify({'authenticated': True})
-
- # Record failed attempt
- db.execute('INSERT INTO failed_attempts (ip_address) VALUES (?)',
- (ip_address, ))
- db.commit()
-
- logger.warning(f'Failed authentication attempt for user: {username}')
- return jsonify({'error': 'Invalid credentials'}), 401
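
A sketch of exercising the two endpoints as an HTTP client; the host, port, '/api/auth' mount prefix, and credentials are assumptions rather than values from the repository:

import requests

BASE = 'http://localhost:6868/api/auth'  # placeholder host, port, and prefix

with requests.Session() as s:
    # First run: GET /setup reports whether initial setup is still needed.
    if s.get(f'{BASE}/setup').status_code == 200:
        r = s.post(f'{BASE}/setup', json={'username': 'admin', 'password': 'change-me'})
        print('API key (shown once):', r.json().get('api_key'))

    # Later logins go through /authenticate; 5 failures per IP within 15 minutes returns 429.
    r = s.post(f'{BASE}/authenticate', json={'username': 'admin', 'password': 'change-me'})
    print(r.status_code, r.json())
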
diff --git a/backend/app/backup/__init__.py b/backend/app/backup/__init__.py
deleted file mode 100644
index 30e16e0..0000000
--- a/backend/app/backup/__init__.py
+++ /dev/null
@@ -1,179 +0,0 @@
-# app/backup/__init__.py
-from flask import Blueprint, request, jsonify, send_file
-import logging
-from ..task.backup.backup import BackupManager
-from ..db import get_db
-import os
-from datetime import datetime
-import tempfile
-import zipfile
-
-logger = logging.getLogger(__name__)
-bp = Blueprint('backup', __name__)
-
-
-@bp.route('', methods=['GET'])
-def list_backups():
- """Get list of all backups"""
- try:
- manager = BackupManager()
- backups = manager.list_backups()
-
- # Add file size and last modified time to each backup
- for backup in backups:
- file_path = os.path.join(manager.backup_dir, backup['filename'])
- if os.path.exists(file_path):
- backup['size'] = os.path.getsize(file_path)
- backup['created_at'] = datetime.fromtimestamp(
- os.path.getmtime(file_path)).isoformat()
- else:
- backup['size'] = None
- backup['created_at'] = None
-
- return jsonify(backups), 200
- except Exception as e:
- logger.error(f'Error listing backups: {str(e)}')
- return jsonify({'error': 'Failed to list backups'}), 500
-
-
-@bp.route('', methods=['POST'])
-def create_backup():
- """Create a new backup manually"""
- try:
- manager = BackupManager()
- success, result = manager.create_backup()
-
- if success:
- return jsonify({
- 'message': 'Backup created successfully',
- 'filename': result
- }), 201
- else:
- return jsonify({'error':
- f'Failed to create backup: {result}'}), 500
- except Exception as e:
- logger.error(f'Error creating backup: {str(e)}')
- return jsonify({'error': 'Failed to create backup'}), 500
-
-
-@bp.route('/<filename>', methods=['GET'])
-def download_backup(filename):
- """Download a specific backup file"""
- try:
- manager = BackupManager()
- file_path = os.path.join(manager.backup_dir, filename)
-
- if not os.path.exists(file_path):
- return jsonify({'error': 'Backup file not found'}), 404
-
- return send_file(file_path,
- mimetype='application/zip',
- as_attachment=True,
- download_name=filename)
- except Exception as e:
- logger.error(f'Error downloading backup: {str(e)}')
- return jsonify({'error': 'Failed to download backup'}), 500
-
-
-@bp.route('/<filename>/restore', methods=['POST'])
-def restore_backup(filename):
- """Restore from a specific backup"""
- try:
- manager = BackupManager()
- success, message = manager.restore_backup(filename)
-
- if success:
- return jsonify({'message': 'Backup restored successfully'}), 200
- else:
- return jsonify({'error':
- f'Failed to restore backup: {message}'}), 500
- except Exception as e:
- logger.error(f'Error restoring backup: {str(e)}')
- return jsonify({'error': 'Failed to restore backup'}), 500
-
-
-@bp.route('/<filename>', methods=['DELETE'])
-def delete_backup(filename):
- """Delete a specific backup"""
- try:
- manager = BackupManager()
- file_path = os.path.join(manager.backup_dir, filename)
-
- if not os.path.exists(file_path):
- return jsonify({'error': 'Backup file not found'}), 404
-
- # Remove the file
- os.remove(file_path)
-
- # Remove from database
- with get_db() as conn:
- conn.execute('DELETE FROM backups WHERE filename = ?',
- (filename, ))
- conn.commit()
-
- return jsonify({'message': 'Backup deleted successfully'}), 200
- except Exception as e:
- logger.error(f'Error deleting backup: {str(e)}')
- return jsonify({'error': 'Failed to delete backup'}), 500
-
-
-@bp.route('/import', methods=['POST'])
-def import_backup():
- """Import and restore from an uploaded backup file"""
- if 'file' not in request.files:
- return jsonify({'error': 'No file part in the request'}), 400
-
- file = request.files['file']
-
- if file.filename == '':
- return jsonify({'error': 'No file selected for uploading'}), 400
-
- if not file.filename.endswith('.zip'):
- return jsonify({'error': 'File must be a zip archive'}), 400
-
- try:
- # Create a temporary file to store the upload
- with tempfile.NamedTemporaryFile(delete=False) as temp_file:
- file.save(temp_file.name)
-
- # Validate the zip file
- validation_result = is_valid_backup_zip(temp_file.name)
- if not validation_result[0]:
- os.unlink(temp_file.name)
- return jsonify({'error': validation_result[1]}), 400
-
- # Use the BackupManager to restore from this file
- manager = BackupManager()
- success, message = manager.restore_backup_from_file(temp_file.name)
-
- # Delete the temporary file
- os.unlink(temp_file.name)
-
- if success:
- return jsonify(
- {'message': 'Backup imported and restored successfully'}), 200
- else:
- return jsonify(
- {'error':
- f'Failed to import and restore backup: {message}'}), 500
-
- except Exception as e:
- logger.error(f'Error importing and restoring backup: {str(e)}')
- return jsonify({'error': 'Failed to import and restore backup'}), 500
-
-
-def is_valid_backup_zip(file_path):
- """Check if the zip file is a valid backup"""
- try:
- if os.path.getsize(file_path) > 100 * 1024 * 1024: # 100 MB
- return False, "Backup file is too large (max 100 MB)"
-
- with zipfile.ZipFile(file_path, 'r') as zipf:
- file_list = zipf.namelist()
-
- if 'profilarr.db' not in file_list:
- return False, "Backup file does not contain profilarr.db"
-
- return True, "Valid backup file"
- except zipfile.BadZipFile:
- return False, "Invalid zip file"
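
Validation runs before any restore work, so an archive can be checked the same way the /import route does before handing it to the manager. A sketch, with a placeholder file path:

archive = '/tmp/profilarr-backup.zip'  # placeholder path
ok, reason = is_valid_backup_zip(archive)
if ok:
    manager = BackupManager()
    success, message = manager.restore_backup_from_file(archive)
    print(success, message)
else:
    print(f'Rejected: {reason}')
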
diff --git a/backend/app/compile/__init__.py b/backend/app/compile/__init__.py
deleted file mode 100644
index 9df7ddc..0000000
--- a/backend/app/compile/__init__.py
+++ /dev/null
@@ -1,12 +0,0 @@
-# app/compile/__init__.py
-from .mappings import TargetApp, ValueResolver
-from .format_compiler import (CustomFormat, FormatConverter, FormatProcessor,
- compile_custom_format)
-from .profile_compiler import (ProfileConverter, ProfileProcessor,
- compile_quality_profile)
-
-__all__ = [
- 'TargetApp', 'ValueResolver', 'CustomFormat', 'FormatConverter',
- 'FormatProcessor', 'compile_custom_format', 'ProfileConverter',
- 'ProfileProcessor', 'compile_quality_profile'
-]
diff --git a/backend/app/compile/format_compiler.py b/backend/app/compile/format_compiler.py
deleted file mode 100644
index 8859963..0000000
--- a/backend/app/compile/format_compiler.py
+++ /dev/null
@@ -1,224 +0,0 @@
-# app/compile/format_compiler.py
-"""Format compilation module for converting custom formats"""
-from dataclasses import dataclass
-from pathlib import Path
-from typing import Dict, List, Optional
-import json
-import yaml
-
-from .mappings import TargetApp, ValueResolver
-
-
-@dataclass
-class Specification:
- """Data class for format specifications"""
- name: str
- implementation: str
- negate: bool = False
- required: bool = False
-    fields: Optional[List[Dict[str, str]]] = None
-
- def __post_init__(self):
- if self.fields is None:
- self.fields = []
-
-
-@dataclass
-class CustomFormat:
- """Data class for custom format definitions"""
- name: str
- description: str
- tags: List[str]
- conditions: List[Dict]
- tests: List[Dict]
-
-
-@dataclass
-class ConvertedFormat:
- """Data class for converted format output"""
- name: str
- specifications: List[Specification]
-
-
-class FormatConverter:
- """Converts between different format types"""
-
- def __init__(self, patterns: Dict[str, str]):
- self.patterns = patterns
-
- def _create_specification(
- self, condition: Dict,
- target_app: TargetApp) -> Optional[Specification]:
- condition_type = condition['type']
-
- if condition_type in ['release_title', 'release_group', 'edition']:
- pattern_name = condition['pattern']
- pattern = self.patterns.get(pattern_name)
- if not pattern:
- return None
- implementation = ('ReleaseTitleSpecification'
- if condition_type == 'release_title' else
- 'ReleaseGroupSpecification' if condition_type
- == 'release_group' else 'EditionSpecification')
- fields = [{'name': 'value', 'value': pattern}]
-
- elif condition_type == 'source':
- implementation = 'SourceSpecification'
- value = ValueResolver.get_source(condition['source'], target_app)
- fields = [{'name': 'value', 'value': value}]
-
- elif condition_type == 'resolution':
- implementation = 'ResolutionSpecification'
- value = ValueResolver.get_resolution(condition['resolution'])
- fields = [{'name': 'value', 'value': value}]
-
- elif condition_type == 'indexer_flag':
- implementation = 'IndexerFlagSpecification'
- value = ValueResolver.get_indexer_flag(condition.get('flag', ''),
- target_app)
- fields = [{'name': 'value', 'value': value}]
-
- elif condition_type == 'quality_modifier':
- if target_app == TargetApp.SONARR:
- return None
- implementation = 'QualityModifierSpecification'
- value = ValueResolver.get_quality_modifier(
- condition['qualityModifier'])
- fields = [{'name': 'value', 'value': value}]
-
- elif condition_type == 'size':
- implementation = 'SizeSpecification'
- min_size = condition.get('minSize')
- max_size = condition.get('maxSize')
- fields = [{
- 'name': 'min',
- 'value': min_size
- }, {
- 'name': 'max',
- 'value': max_size
- }]
-
- elif condition_type == 'year':
- implementation = 'YearSpecification'
- min_year = condition.get('minYear')
- max_year = condition.get('maxYear')
- fields = [{
- 'name': 'min',
- 'value': min_year
- }, {
- 'name': 'max',
- 'value': max_year
- }]
-
- elif condition_type == 'release_type':
- if target_app == TargetApp.RADARR:
- return None
- implementation = 'ReleaseTypeSpecification'
- value = ValueResolver.get_release_type(condition['releaseType'])
- fields = [{'name': 'value', 'value': value}]
-
- elif condition_type == 'language':
- implementation = 'LanguageSpecification'
- language_name = condition['language'].lower()
- try:
- language_data = ValueResolver.get_language(language_name,
- target_app,
- for_profile=False)
- fields = [{'name': 'value', 'value': language_data['id']}]
- if 'exceptLanguage' in condition:
- except_value = condition['exceptLanguage']
- fields.append({
- 'name': 'exceptLanguage',
- 'value': except_value
- })
- except Exception:
- return None
-
- else:
- return None
-
- return Specification(name=condition.get('name', ''),
- implementation=implementation,
- negate=condition.get('negate', False),
- required=condition.get('required', False),
- fields=fields)
-
- def convert_format(self, custom_format: CustomFormat,
- target_app: TargetApp) -> ConvertedFormat:
- specifications = []
- for condition in custom_format.conditions:
- try:
- spec = self._create_specification(condition, target_app)
- if spec:
- specifications.append(spec)
- except Exception:
- continue
-
- return ConvertedFormat(name=custom_format.name,
- specifications=specifications)
-
-
-class FormatProcessor:
- """Main class for processing format files"""
-
- def __init__(self, input_dir: Path, output_dir: Path, patterns_dir: Path):
- self.input_dir = input_dir
- self.output_dir = output_dir
- self.patterns = self._load_patterns(patterns_dir)
- self.converter = FormatConverter(self.patterns)
-
- @staticmethod
- def _load_patterns(patterns_dir: Path) -> Dict[str, str]:
- patterns = {}
- for file_path in patterns_dir.glob('*.yml'):
- with file_path.open('r') as f:
- pattern_data = yaml.safe_load(f)
- patterns[pattern_data['name']] = pattern_data['pattern']
- return patterns
-
- def _load_custom_format(self, format_name: str) -> Optional[CustomFormat]:
- format_path = self.input_dir / f"{format_name}.yml"
- if not format_path.exists():
- return None
-
- with format_path.open('r') as f:
- raw_data = yaml.safe_load(f)
- return CustomFormat(**raw_data)
-
- def process_format(self,
- format_name: str,
- target_app: TargetApp,
- return_data: bool = False) -> Optional[ConvertedFormat]:
- custom_format = self._load_custom_format(format_name)
- if not custom_format:
- return None
-
- converted_format = self.converter.convert_format(
- custom_format, target_app)
-
- output_data = [{
-            'name': converted_format.name,
-            'specifications':
-            [vars(spec) for spec in converted_format.specifications]
- }]
-
- if not return_data:
- output_path = self.output_dir / f"{format_name}.json"
- with output_path.open('w') as f:
- json.dump(output_data, f, indent=2)
-
- return converted_format
-
-
-def compile_custom_format(format_data: Dict) -> List[Dict]:
- custom_format = CustomFormat(**format_data)
- patterns = {}
- converter = FormatConverter(patterns)
- converted = converter.convert_format(custom_format, TargetApp.RADARR)
- output_data = [{
-        'name': converted.name,
- 'specifications': [vars(spec) for spec in converted.specifications]
- }]
- return output_data
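
compile_custom_format targets Radarr with an empty pattern map, so a condition type that needs no regex lookup (resolution, source, size, ...) is the easiest way to see the output shape. A sketch with an invented definition that follows the CustomFormat fields:

format_definition = {
    'name': '2160p Releases',
    'description': 'Matches UHD releases',
    'tags': [],
    'conditions': [{
        'type': 'resolution',
        'name': 'Is 2160p',
        'resolution': '2160p',
        'required': True,
        'negate': False,
    }],
    'tests': [],
}

compiled = compile_custom_format(format_definition)
# -> [{'name': '2160p Releases',
#      'specifications': [{'name': 'Is 2160p',
#                          'implementation': 'ResolutionSpecification',
#                          'negate': False, 'required': True,
#                          'fields': [{'name': 'value', 'value': 2160}]}]}]
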
diff --git a/backend/app/compile/mappings.py b/backend/app/compile/mappings.py
deleted file mode 100644
index 8c14874..0000000
--- a/backend/app/compile/mappings.py
+++ /dev/null
@@ -1,990 +0,0 @@
-# app/compile/mappings.py
-"""Centralized constants and mappings for arr applications"""
-from enum import Enum, auto
-from typing import Dict, Any
-import logging
-
-logger = logging.getLogger(__name__)
-logger.setLevel(logging.DEBUG)
-
-
-class TargetApp(Enum):
- """Enum for target application types"""
- RADARR = auto()
- SONARR = auto()
-
-
-class IndexerFlags:
- """Indexer flag mappings for both applications"""
- RADARR = {
- 'freeleech': 1,
- 'halfleech': 2,
- 'double_upload': 4,
- 'internal': 32,
- 'scene': 128,
- 'freeleech_75': 256,
- 'freeleech_25': 512,
- 'nuked': 2048,
- 'ptp_golden': 8,
- 'ptp_approved': 16
- }
-
- SONARR = {
- 'freeleech': 1,
- 'halfleech': 2,
- 'double_upload': 4,
- 'internal': 8,
- 'scene': 16,
- 'freeleech_75': 32,
- 'freeleech_25': 64,
- 'nuked': 128
- }
-
-
-class Sources:
- """Source mappings for both applications"""
- RADARR = {
- 'cam': 1,
- 'telesync': 2,
- 'telecine': 3,
- 'workprint': 4,
- 'dvd': 5,
- 'tv': 6,
- 'web_dl': 7,
- 'webrip': 8,
- 'bluray': 9
- }
-
- SONARR = {
- 'television': 1,
- 'television_raw': 2,
- 'web_dl': 3,
- 'webrip': 4,
- 'dvd': 5,
- 'bluray': 6,
- 'bluray_raw': 7
- }
-
-
-class Quality_Modifiers:
- """Quality modifier mappings for Radarr ONLY"""
- RADARR = {
- 'none': 0,
- 'regional': 1,
- 'screener': 2,
- 'rawhd': 3,
- 'brdisk': 4,
- 'remux': 5,
- }
-
-
-class Release_Types:
- """Release type mappings for Sonarr ONLY"""
- SONARR = {
- 'none': 0,
- 'single_episode': 1,
- 'multi_episode': 2,
- 'season_pack': 3,
- }
-
-
-class Qualities:
- """Quality mappings for both applications"""
- COMMON_RESOLUTIONS = {
- '360p': 360,
- '480p': 480,
- '540p': 540,
- '576p': 576,
- '720p': 720,
- '1080p': 1080,
- '2160p': 2160
- }
-
- RADARR = {
- "Unknown": {
- "id": 0,
- "name": "Unknown",
- "source": "unknown",
- "resolution": 0
- },
- "SDTV": {
- "id": 1,
- "name": "SDTV",
- "source": "tv",
- "resolution": 480
- },
- "DVD": {
- "id": 2,
- "name": "DVD",
- "source": "dvd",
- "resolution": 480
- },
- "WEBDL-1080p": {
- "id": 3,
- "name": "WEBDL-1080p",
- "source": "webdl",
- "resolution": 1080
- },
- "HDTV-720p": {
- "id": 4,
- "name": "HDTV-720p",
- "source": "tv",
- "resolution": 720
- },
- "WEBDL-720p": {
- "id": 5,
- "name": "WEBDL-720p",
- "source": "webdl",
- "resolution": 720
- },
- "Bluray-720p": {
- "id": 6,
- "name": "Bluray-720p",
- "source": "bluray",
- "resolution": 720
- },
- "Bluray-1080p": {
- "id": 7,
- "name": "Bluray-1080p",
- "source": "bluray",
- "resolution": 1080
- },
- "WEBDL-480p": {
- "id": 8,
- "name": "WEBDL-480p",
- "source": "webdl",
- "resolution": 480
- },
- "HDTV-1080p": {
- "id": 9,
- "name": "HDTV-1080p",
- "source": "tv",
- "resolution": 1080
- },
- "Raw-HD": {
- "id": 10,
- "name": "Raw-HD",
- "source": "tv",
- "resolution": 1080
- },
- "WEBRip-480p": {
- "id": 12,
- "name": "WEBRip-480p",
- "source": "webrip",
- "resolution": 480
- },
- "WEBRip-720p": {
- "id": 14,
- "name": "WEBRip-720p",
- "source": "webrip",
- "resolution": 720
- },
- "WEBRip-1080p": {
- "id": 15,
- "name": "WEBRip-1080p",
- "source": "webrip",
- "resolution": 1080
- },
- "HDTV-2160p": {
- "id": 16,
- "name": "HDTV-2160p",
- "source": "tv",
- "resolution": 2160
- },
- "WEBRip-2160p": {
- "id": 17,
- "name": "WEBRip-2160p",
- "source": "webrip",
- "resolution": 2160
- },
- "WEBDL-2160p": {
- "id": 18,
- "name": "WEBDL-2160p",
- "source": "webdl",
- "resolution": 2160
- },
- "Bluray-2160p": {
- "id": 19,
- "name": "Bluray-2160p",
- "source": "bluray",
- "resolution": 2160
- },
- "Bluray-480p": {
- "id": 20,
- "name": "Bluray-480p",
- "source": "bluray",
- "resolution": 480
- },
- "Bluray-576p": {
- "id": 21,
- "name": "Bluray-576p",
- "source": "bluray",
- "resolution": 576
- },
- "BR-DISK": {
- "id": 22,
- "name": "BR-DISK",
- "source": "bluray",
- "resolution": 1080
- },
- "DVD-R": {
- "id": 23,
- "name": "DVD-R",
- "source": "dvd",
- "resolution": 480
- },
- "WORKPRINT": {
- "id": 24,
- "name": "WORKPRINT",
- "source": "workprint",
- "resolution": 0
- },
- "CAM": {
- "id": 25,
- "name": "CAM",
- "source": "cam",
- "resolution": 0
- },
- "TELESYNC": {
- "id": 26,
- "name": "TELESYNC",
- "source": "telesync",
- "resolution": 0
- },
- "TELECINE": {
- "id": 27,
- "name": "TELECINE",
- "source": "telecine",
- "resolution": 0
- },
- "DVDSCR": {
- "id": 28,
- "name": "DVDSCR",
- "source": "dvd",
- "resolution": 480
- },
- "REGIONAL": {
- "id": 29,
- "name": "REGIONAL",
- "source": "dvd",
- "resolution": 480
- },
- "Remux-1080p": {
- "id": 30,
- "name": "Remux-1080p",
- "source": "bluray",
- "resolution": 1080
- },
- "Remux-2160p": {
- "id": 31,
- "name": "Remux-2160p",
- "source": "bluray",
- "resolution": 2160
- }
- }
-
- SONARR = {
- "Unknown": {
- "id": 0,
- "name": "Unknown",
- "source": "unknown",
- "resolution": 0
- },
- "SDTV": {
- "id": 1,
- "name": "SDTV",
- "source": "television",
- "resolution": 480
- },
- "DVD": {
- "id": 2,
- "name": "DVD",
- "source": "dvd",
- "resolution": 480
- },
- "WEBDL-1080p": {
- "id": 3,
- "name": "WEBDL-1080p",
- "source": "web",
- "resolution": 1080
- },
- "HDTV-720p": {
- "id": 4,
- "name": "HDTV-720p",
- "source": "television",
- "resolution": 720
- },
- "WEBDL-720p": {
- "id": 5,
- "name": "WEBDL-720p",
- "source": "web",
- "resolution": 720
- },
- "Bluray-720p": {
- "id": 6,
- "name": "Bluray-720p",
- "source": "bluray",
- "resolution": 720
- },
- "Bluray-1080p": {
- "id": 7,
- "name": "Bluray-1080p",
- "source": "bluray",
- "resolution": 1080
- },
- "WEBDL-480p": {
- "id": 8,
- "name": "WEBDL-480p",
- "source": "web",
- "resolution": 480
- },
- "HDTV-1080p": {
- "id": 9,
- "name": "HDTV-1080p",
- "source": "television",
- "resolution": 1080
- },
- "Raw-HD": {
- "id": 10,
- "name": "Raw-HD",
- "source": "televisionRaw",
- "resolution": 1080
- },
- "WEBRip-480p": {
- "id": 12,
- "name": "WEBRip-480p",
- "source": "webRip",
- "resolution": 480
- },
- "Bluray-480p": {
- "id": 13,
- "name": "Bluray-480p",
- "source": "bluray",
- "resolution": 480
- },
- "WEBRip-720p": {
- "id": 14,
- "name": "WEBRip-720p",
- "source": "webRip",
- "resolution": 720
- },
- "WEBRip-1080p": {
- "id": 15,
- "name": "WEBRip-1080p",
- "source": "webRip",
- "resolution": 1080
- },
- "HDTV-2160p": {
- "id": 16,
- "name": "HDTV-2160p",
- "source": "television",
- "resolution": 2160
- },
- "WEBRip-2160p": {
- "id": 17,
- "name": "WEBRip-2160p",
- "source": "webRip",
- "resolution": 2160
- },
- "WEBDL-2160p": {
- "id": 18,
- "name": "WEBDL-2160p",
- "source": "web",
- "resolution": 2160
- },
- "Bluray-2160p": {
- "id": 19,
- "name": "Bluray-2160p",
- "source": "bluray",
- "resolution": 2160
- },
- "Bluray-1080p Remux": {
- "id": 20,
- "name": "Bluray-1080p Remux",
- "source": "blurayRaw",
- "resolution": 1080
- },
- "Bluray-2160p Remux": {
- "id": 21,
- "name": "Bluray-2160p Remux",
- "source": "blurayRaw",
- "resolution": 2160
- },
- "Bluray-576p": {
- "id": 22,
- "name": "Bluray-576p",
- "source": "bluray",
- "resolution": 576
- }
- }
-
-
-class Languages:
- """Language mappings for both applications"""
- RADARR = {
- 'any': {
- 'id': -1,
- 'name': 'Any'
- },
- 'original': {
- 'id': -2,
- 'name': 'Original'
- },
- 'unknown': {
- 'id': 0,
- 'name': 'Unknown'
- },
- 'english': {
- 'id': 1,
- 'name': 'English'
- },
- 'french': {
- 'id': 2,
- 'name': 'French'
- },
- 'spanish': {
- 'id': 3,
- 'name': 'Spanish'
- },
- 'german': {
- 'id': 4,
- 'name': 'German'
- },
- 'italian': {
- 'id': 5,
- 'name': 'Italian'
- },
- 'danish': {
- 'id': 6,
- 'name': 'Danish'
- },
- 'dutch': {
- 'id': 7,
- 'name': 'Dutch'
- },
- 'japanese': {
- 'id': 8,
- 'name': 'Japanese'
- },
- 'icelandic': {
- 'id': 9,
- 'name': 'Icelandic'
- },
- 'chinese': {
- 'id': 10,
- 'name': 'Chinese'
- },
- 'russian': {
- 'id': 11,
- 'name': 'Russian'
- },
- 'polish': {
- 'id': 12,
- 'name': 'Polish'
- },
- 'vietnamese': {
- 'id': 13,
- 'name': 'Vietnamese'
- },
- 'swedish': {
- 'id': 14,
- 'name': 'Swedish'
- },
- 'norwegian': {
- 'id': 15,
- 'name': 'Norwegian'
- },
- 'finnish': {
- 'id': 16,
- 'name': 'Finnish'
- },
- 'turkish': {
- 'id': 17,
- 'name': 'Turkish'
- },
- 'portuguese': {
- 'id': 18,
- 'name': 'Portuguese'
- },
- 'flemish': {
- 'id': 19,
- 'name': 'Flemish'
- },
- 'greek': {
- 'id': 20,
- 'name': 'Greek'
- },
- 'korean': {
- 'id': 21,
- 'name': 'Korean'
- },
- 'hungarian': {
- 'id': 22,
- 'name': 'Hungarian'
- },
- 'hebrew': {
- 'id': 23,
- 'name': 'Hebrew'
- },
- 'lithuanian': {
- 'id': 24,
- 'name': 'Lithuanian'
- },
- 'czech': {
- 'id': 25,
- 'name': 'Czech'
- },
- 'hindi': {
- 'id': 26,
- 'name': 'Hindi'
- },
- 'romanian': {
- 'id': 27,
- 'name': 'Romanian'
- },
- 'thai': {
- 'id': 28,
- 'name': 'Thai'
- },
- 'bulgarian': {
- 'id': 29,
- 'name': 'Bulgarian'
- },
- 'portuguese_br': {
- 'id': 30,
- 'name': 'Portuguese (Brazil)'
- },
- 'arabic': {
- 'id': 31,
- 'name': 'Arabic'
- },
- 'ukrainian': {
- 'id': 32,
- 'name': 'Ukrainian'
- },
- 'persian': {
- 'id': 33,
- 'name': 'Persian'
- },
- 'bengali': {
- 'id': 34,
- 'name': 'Bengali'
- },
- 'slovak': {
- 'id': 35,
- 'name': 'Slovak'
- },
- 'latvian': {
- 'id': 36,
- 'name': 'Latvian'
- },
- 'spanish_latino': {
- 'id': 37,
- 'name': 'Spanish (Latino)'
- },
- 'catalan': {
- 'id': 38,
- 'name': 'Catalan'
- },
- 'croatian': {
- 'id': 39,
- 'name': 'Croatian'
- },
- 'serbian': {
- 'id': 40,
- 'name': 'Serbian'
- },
- 'bosnian': {
- 'id': 41,
- 'name': 'Bosnian'
- },
- 'estonian': {
- 'id': 42,
- 'name': 'Estonian'
- },
- 'tamil': {
- 'id': 43,
- 'name': 'Tamil'
- },
- 'indonesian': {
- 'id': 44,
- 'name': 'Indonesian'
- },
- 'telugu': {
- 'id': 45,
- 'name': 'Telugu'
- },
- 'macedonian': {
- 'id': 46,
- 'name': 'Macedonian'
- },
- 'slovenian': {
- 'id': 47,
- 'name': 'Slovenian'
- },
- 'malayalam': {
- 'id': 48,
- 'name': 'Malayalam'
- },
- 'kannada': {
- 'id': 49,
- 'name': 'Kannada'
- },
- 'albanian': {
- 'id': 50,
- 'name': 'Albanian'
- },
- 'afrikaans': {
- 'id': 51,
- 'name': 'Afrikaans'
- }
- }
-
- SONARR = {
- 'unknown': {
- 'id': 0,
- 'name': 'Unknown'
- },
- 'english': {
- 'id': 1,
- 'name': 'English'
- },
- 'french': {
- 'id': 2,
- 'name': 'French'
- },
- 'spanish': {
- 'id': 3,
- 'name': 'Spanish'
- },
- 'german': {
- 'id': 4,
- 'name': 'German'
- },
- 'italian': {
- 'id': 5,
- 'name': 'Italian'
- },
- 'danish': {
- 'id': 6,
- 'name': 'Danish'
- },
- 'dutch': {
- 'id': 7,
- 'name': 'Dutch'
- },
- 'japanese': {
- 'id': 8,
- 'name': 'Japanese'
- },
- 'icelandic': {
- 'id': 9,
- 'name': 'Icelandic'
- },
- 'chinese': {
- 'id': 10,
- 'name': 'Chinese'
- },
- 'russian': {
- 'id': 11,
- 'name': 'Russian'
- },
- 'polish': {
- 'id': 12,
- 'name': 'Polish'
- },
- 'vietnamese': {
- 'id': 13,
- 'name': 'Vietnamese'
- },
- 'swedish': {
- 'id': 14,
- 'name': 'Swedish'
- },
- 'norwegian': {
- 'id': 15,
- 'name': 'Norwegian'
- },
- 'finnish': {
- 'id': 16,
- 'name': 'Finnish'
- },
- 'turkish': {
- 'id': 17,
- 'name': 'Turkish'
- },
- 'portuguese': {
- 'id': 18,
- 'name': 'Portuguese'
- },
- 'flemish': {
- 'id': 19,
- 'name': 'Flemish'
- },
- 'greek': {
- 'id': 20,
- 'name': 'Greek'
- },
- 'korean': {
- 'id': 21,
- 'name': 'Korean'
- },
- 'hungarian': {
- 'id': 22,
- 'name': 'Hungarian'
- },
- 'hebrew': {
- 'id': 23,
- 'name': 'Hebrew'
- },
- 'lithuanian': {
- 'id': 24,
- 'name': 'Lithuanian'
- },
- 'czech': {
- 'id': 25,
- 'name': 'Czech'
- },
- 'arabic': {
- 'id': 26,
- 'name': 'Arabic'
- },
- 'hindi': {
- 'id': 27,
- 'name': 'Hindi'
- },
- 'bulgarian': {
- 'id': 28,
- 'name': 'Bulgarian'
- },
- 'malayalam': {
- 'id': 29,
- 'name': 'Malayalam'
- },
- 'ukrainian': {
- 'id': 30,
- 'name': 'Ukrainian'
- },
- 'slovak': {
- 'id': 31,
- 'name': 'Slovak'
- },
- 'thai': {
- 'id': 32,
- 'name': 'Thai'
- },
- 'portuguese_br': {
- 'id': 33,
- 'name': 'Portuguese (Brazil)'
- },
- 'spanish_latino': {
- 'id': 34,
- 'name': 'Spanish (Latino)'
- },
- 'romanian': {
- 'id': 35,
- 'name': 'Romanian'
- },
- 'latvian': {
- 'id': 36,
- 'name': 'Latvian'
- },
- 'persian': {
- 'id': 37,
- 'name': 'Persian'
- },
- 'catalan': {
- 'id': 38,
- 'name': 'Catalan'
- },
- 'croatian': {
- 'id': 39,
- 'name': 'Croatian'
- },
- 'serbian': {
- 'id': 40,
- 'name': 'Serbian'
- },
- 'bosnian': {
- 'id': 41,
- 'name': 'Bosnian'
- },
- 'estonian': {
- 'id': 42,
- 'name': 'Estonian'
- },
- 'tamil': {
- 'id': 43,
- 'name': 'Tamil'
- },
- 'indonesian': {
- 'id': 44,
- 'name': 'Indonesian'
- },
- 'macedonian': {
- 'id': 45,
- 'name': 'Macedonian'
- },
- 'slovenian': {
- 'id': 46,
- 'name': 'Slovenian'
- },
- 'original': {
- 'id': -2,
- 'name': 'Original'
- }
- }
-
-
-class QualityNameMapper:
- """Maps between different quality naming conventions"""
- REMUX_MAPPINGS = {
- TargetApp.SONARR: {
- "Remux-1080p": "Bluray-1080p Remux",
- "Remux-2160p": "Bluray-2160p Remux"
- },
- TargetApp.RADARR: {
- "Remux-1080p": "Remux-1080p",
- "Remux-2160p": "Remux-2160p"
- }
- }
-
- ALTERNATE_NAMES = {
- "BR-Disk": "BR-DISK",
- "BR-DISK": "BR-DISK",
- "BRDISK": "BR-DISK",
- "BR_DISK": "BR-DISK",
- "BLURAY-DISK": "BR-DISK",
- "BLURAY_DISK": "BR-DISK",
- "BLURAYDISK": "BR-DISK",
- "Telecine": "TELECINE",
- "TELECINE": "TELECINE",
- "TeleCine": "TELECINE",
- "Telesync": "TELESYNC",
- "TELESYNC": "TELESYNC",
- "TeleSync": "TELESYNC",
- }
-
- @classmethod
- def map_quality_name(cls, name: str, target_app: TargetApp) -> str:
- """
- Maps quality names between different formats based on target app
- Args:
- name: The quality name to map
- target_app: The target application (RADARR or SONARR)
- Returns:
- The mapped quality name
- """
- # Handle empty or None cases
- if not name:
- return name
-
- # First check for remux mappings
- if name in cls.REMUX_MAPPINGS.get(target_app, {}):
- return cls.REMUX_MAPPINGS[target_app][name]
-
- # Then check for alternate spellings
- normalized_name = name.upper().replace("-", "").replace("_", "")
- for alt_name, standard_name in cls.ALTERNATE_NAMES.items():
- if normalized_name == alt_name.upper().replace("-", "").replace(
- "_", ""):
- return standard_name
-
- return name
-
-
-class LanguageNameMapper:
- """Maps between different language naming conventions"""
- ALTERNATE_NAMES = {
- "spanish-latino": "spanish_latino",
- "spanish_latino": "spanish_latino",
- "spanishlatino": "spanish_latino",
- "portuguese-br": "portuguese_br",
- "portuguese_br": "portuguese_br",
- "portuguesebr": "portuguese_br",
- "portuguese-brazil": "portuguese_br",
- "portuguese_brazil": "portuguese_br"
- }
-
- @classmethod
- def normalize_language_name(cls, name: str) -> str:
- """
- Normalizes language names to a consistent format
- Args:
- name: The language name to normalize
- Returns:
- The normalized language name
- """
- if not name:
- return name
-
- normalized = name.lower().replace(" ", "_")
- return cls.ALTERNATE_NAMES.get(normalized, normalized)
-
-
-class ValueResolver:
- """Helper class to resolve values based on target app"""
-
- @classmethod
- def get_indexer_flag(cls, flag: str, target_app: TargetApp) -> int:
- flags = IndexerFlags.RADARR if target_app == TargetApp.RADARR else IndexerFlags.SONARR
- return flags.get(flag.lower(), 0)
-
- @classmethod
- def get_source(cls, source: str, target_app: TargetApp) -> int:
- sources = Sources.RADARR if target_app == TargetApp.RADARR else Sources.SONARR
- return sources.get(source.lower(), 0)
-
- @classmethod
- def get_resolution(cls, resolution: str) -> int:
- return Qualities.COMMON_RESOLUTIONS.get(resolution.lower(), 0)
-
- @classmethod
- def get_qualities(cls, target_app: TargetApp) -> Dict[str, Any]:
- qualities = Qualities.RADARR if target_app == TargetApp.RADARR else Qualities.SONARR
- return qualities
-
- @classmethod
- def get_quality_name(cls, name: str, target_app: TargetApp) -> str:
- """Maps quality names between different formats based on target app"""
- return QualityNameMapper.map_quality_name(name, target_app)
-
- @classmethod
- def get_quality_modifier(cls, quality_modifier: str) -> int:
- return Quality_Modifiers.RADARR.get(quality_modifier.lower(), 0)
-
- @classmethod
- def get_release_type(cls, release_type: str) -> int:
- return Release_Types.SONARR.get(release_type.lower(), 0)
-
- @classmethod
- def get_language(cls,
- language_name: str,
- target_app: TargetApp,
- for_profile: bool = True) -> Dict[str, Any]:
- """
- Get language mapping based on target app and context
-
- Args:
- language_name: Name of the language to look up
- target_app: Target application (RADARR or SONARR)
- for_profile: If True, this is for a quality profile. If False, this is for a custom format.
- """
- languages = Languages.RADARR if target_app == TargetApp.RADARR else Languages.SONARR
-
- # For profiles, only Radarr uses language settings
- if for_profile and target_app == TargetApp.SONARR:
- return {'id': -2, 'name': 'Original'}
-
- # Normalize the language name
- normalized_name = LanguageNameMapper.normalize_language_name(
- language_name)
- language_data = languages.get(normalized_name)
-
- if not language_data:
- logger.warning(
- f"Language '{language_name}' (normalized: '{normalized_name}') "
- f"not found in {target_app} mappings, falling back to Unknown")
- language_data = languages['unknown']
-
- return language_data
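
The lookup tables above are only ever read through ValueResolver, so a few concrete calls show how the per-app differences surface; the expected values follow the mappings in this file:

assert ValueResolver.get_source('bluray', TargetApp.RADARR) == 9
assert ValueResolver.get_source('bluray', TargetApp.SONARR) == 6
assert ValueResolver.get_resolution('1080p') == 1080
assert ValueResolver.get_indexer_flag('internal', TargetApp.SONARR) == 8

# Profiles on Sonarr always fall back to Original; custom formats resolve the real id.
assert ValueResolver.get_language('french', TargetApp.SONARR, for_profile=True)['id'] == -2
assert ValueResolver.get_language('french', TargetApp.RADARR, for_profile=False)['id'] == 2
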
diff --git a/backend/app/compile/profile_compiler.py b/backend/app/compile/profile_compiler.py
deleted file mode 100644
index 497ca4d..0000000
--- a/backend/app/compile/profile_compiler.py
+++ /dev/null
@@ -1,536 +0,0 @@
-"""Profile compilation module for converting quality profiles"""
-from dataclasses import dataclass
-from pathlib import Path
-from typing import Dict, List, Optional, Any, Callable
-import json
-import yaml
-import logging
-import asyncio
-import aiohttp
-
-from .mappings import TargetApp, ValueResolver
-from ..data.utils import load_yaml_file, get_category_directory
-from ..importarr.format_memory import import_format_from_memory, async_import_format_from_memory
-from ..db.queries.settings import get_language_import_score
-
-logger = logging.getLogger(__name__)
-
-
-@dataclass
-class ConvertedProfile:
- """Data class for converted profile output"""
- name: str
- items: List[Dict]
- format_items: List[Dict]
- upgrade_allowed: bool
- min_format_score: int
- cutoff_format_score: int
- min_upgrade_format_score: int
- language: Dict
- cutoff: Optional[int] = None
-
-
-class ProfileConverter:
- """Converts quality profiles between different formats"""
-
- def __init__(self,
- target_app: TargetApp,
- base_url: str = None,
- api_key: str = None,
- format_importer: Callable = None,
- import_as_unique: bool = False):
- self.target_app = target_app
- self.base_url = base_url
- self.api_key = api_key
- self.format_importer = format_importer
- self.import_as_unique = import_as_unique
- self.quality_mappings = ValueResolver.get_qualities(target_app)
-
- def _convert_group_id(self, group_id: int) -> int:
- if group_id < 0:
- return 1000 + abs(group_id)
- return group_id
-
- def _create_all_qualities(self,
- allowed_qualities: List[str]) -> List[Dict]:
- qualities = []
- for quality_name in allowed_qualities:
- if quality_name in self.quality_mappings:
- qualities.append({
- "quality":
- self.quality_mappings[quality_name].copy(),
- "items": [],
- "allowed":
- True
- })
- return qualities
-
- def _generate_language_formats(self,
- behaviour: str,
- language: str) -> List[Dict]:
- """
- Generate language-specific format configurations without importing them.
- This is useful for pre-loading and caching language formats.
-
- Args:
- behaviour: Language behavior ('must', 'prefer', 'only')
- language: Language code ('english', 'french', etc.)
-
- Returns:
- List of format configurations for the specified language
- """
- try:
- formats_to_import = []
-
- # Get the base format as a template
- base_format_path = f"{get_category_directory('custom_format')}/Not English.yml"
- base_format = load_yaml_file(base_format_path)
-
- # Get language data for translations
- language_data = ValueResolver.get_language(
- language, self.target_app, for_profile=False
- )
-
- # Create the main "Not X" format (e.g., "Not French")
- modified_format = base_format.copy()
- base_name = f"Not {language_data['name']}"
- modified_format['name'] = base_name
-
- # Update conditions to refer to the specific language
- for condition in modified_format['conditions']:
- if condition.get('type') == 'language':
- condition['language'] = language
- if condition.get('name') == 'Not English':
- condition['name'] = f"Not {language_data['name']}"
- elif condition.get('name') == 'Includes English':
- condition['name'] = f"Includes {language_data['name']}"
-
- formats_to_import.append(modified_format)
-
- # Add additional formats for 'only' behavior
- if behaviour == 'only':
- additional_formats = [
- "Not Only English", "Not Only English (Missing)"
- ]
- for format_name in additional_formats:
- format_path = f"{get_category_directory('custom_format')}/{format_name}.yml"
- format_data = load_yaml_file(format_path)
- format_data['name'] = format_data['name'].replace(
- 'English', language_data['name'])
-
- for c in format_data.get('conditions', []):
- if c.get('type') == 'language':
- c['language'] = language
- if c.get('name') == 'Not English':
- c['name'] = f"Not {language_data['name']}"
- elif c.get('name') == 'Includes English':
- c['name'] = f"Includes {language_data['name']}"
-
- formats_to_import.append(format_data)
-
- return formats_to_import
-
- except Exception as e:
- logger.error(f"Error generating language formats: {str(e)}")
- raise
-
- def _process_language_formats(
- self,
- behaviour: str,
- language: str,
- import_as_unique: bool = False) -> List[Dict]:
- """
- Process language formats by either importing them directly or using the format_importer.
-
- When using the cached profile import, the format_importer will be a dummy function that
- just returns success without actually importing, since the formats were already imported.
- """
- try:
- # Generate the format configurations
- formats_to_import = self._generate_language_formats(behaviour, language)
- format_configs = []
-
- # Check if we're using a format importer (might be None for direct format returns)
- if self.format_importer is None:
- # No importer provided - we're in the special caching mode
- # Just create the format configs directly without importing
- logger.info(f"Using pre-cached language formats for {behaviour}_{language}")
-
- for format_data in formats_to_import:
- format_name = format_data['name']
- if import_as_unique:
- format_name = f"{format_name} [Dictionarry]"
-
- format_configs.append({
- 'name': format_name,
- 'score': get_language_import_score()
- })
-
- return format_configs
-
- # Regular mode with an importer - check if it's our dummy cached importer
- if self.format_importer and hasattr(self.format_importer, '__name__') and self.format_importer.__name__ == 'cached_format_importer':
- logger.info(f"Using cached importer for language formats {behaviour}_{language}")
- # Simply call the dummy importer just to keep the flow consistent,
- # but we'll generate our own format configs
- self.format_importer()
-
- # Create format configs directly
- for format_data in formats_to_import:
- format_name = format_data['name']
- if import_as_unique:
- format_name = f"{format_name} [Dictionarry]"
-
- format_configs.append({
- 'name': format_name,
- 'score': get_language_import_score()
- })
-
- return format_configs
-
- # If we've reached here, we're doing a regular import
- if not self.base_url or not self.api_key or not self.format_importer:
- logger.error("Missing required credentials or format importer")
- raise ValueError(
- "base_url, api_key, and format_importer are required for language format processing"
- )
-
- arr_type = 'radarr' if self.target_app == TargetApp.RADARR else 'sonarr'
-
- # Use asyncio if there are multiple formats to import
- if len(formats_to_import) > 1:
- # Run in event loop
- return asyncio.run(self._async_process_language_formats(
- formats_to_import=formats_to_import,
- arr_type=arr_type,
- import_as_unique=import_as_unique
- ))
-
- # For single format, use regular synchronous version
- for format_data in formats_to_import:
- try:
- result = import_format_from_memory(
- format_data,
- self.base_url,
- self.api_key,
- arr_type,
- import_as_unique=self.import_as_unique)
- if not result.get('success', False):
- logger.error(
- f"Format import failed for: {format_data['name']}")
- raise Exception(
- f"Failed to import format {format_data['name']}")
-
- format_name = format_data['name']
- if import_as_unique:
- format_name = f"{format_name} [Dictionarry]"
-
- format_configs.append({
- 'name': format_name,
- 'score': get_language_import_score()
- })
-
- except Exception as e:
- logger.error(
- f"Error importing format {format_data['name']}: {str(e)}"
- )
- raise
-
- return format_configs
-
- except Exception as e:
- logger.error(f"Error processing language formats: {str(e)}")
- raise
-
- async def _async_process_language_formats(
- self,
- formats_to_import: List[Dict],
- arr_type: str,
- import_as_unique: bool = False) -> List[Dict]:
- """
- Asynchronous version of _process_language_formats for concurrent imports
- """
- logger.info(f"Processing language formats asynchronously: {len(formats_to_import)} formats")
- format_configs = []
- tasks = []
-
- # Create tasks for all formats
- for format_data in formats_to_import:
- task = asyncio.create_task(
- async_import_format_from_memory(
- format_data=format_data,
- base_url=self.base_url,
- api_key=self.api_key,
- arr_type=arr_type,
- import_as_unique=self.import_as_unique
- )
- )
- tasks.append((format_data['name'], task))
-
- # Process all format import results
- for format_name, task in tasks:
- try:
- result = await task
- if not result.get('success', False):
- logger.error(f"Format import failed for: {format_name} (async)")
- raise Exception(f"Failed to import format {format_name}")
-
- display_name = format_name
- if import_as_unique:
- display_name = f"{format_name} [Dictionarry]"
-
- format_configs.append({
- 'name': display_name,
- 'score': get_language_import_score()
- })
- except Exception as e:
- logger.error(f"Error importing format {format_name}: {str(e)} (async)")
- raise
-
- return format_configs
-
- def convert_quality_group(self, group: Dict) -> Dict:
- original_id = group.get("id", 0)
- converted_id = self._convert_group_id(original_id)
-
- allowed_qualities = []
- for q_item in group.get("qualities", []):
- input_name = q_item.get("name", "")
-
- # First map the quality name to handle remux qualities properly
- mapped_name = ValueResolver.get_quality_name(
- input_name, self.target_app)
-
- # Create a case-insensitive lookup map
- quality_map = {k.lower(): k for k in self.quality_mappings}
-
- # Try to find the mapped name in quality mappings
- if mapped_name.lower() in quality_map:
- allowed_qualities.append(quality_map[mapped_name.lower()])
- # Fallback to the original name
- elif input_name.lower() in quality_map:
- allowed_qualities.append(quality_map[input_name.lower()])
-
- converted_group = {
- "name": group["name"],
- "items": self._create_all_qualities(allowed_qualities),
- "allowed": True,
- "id": converted_id
- }
- return converted_group
-
- def convert_profile(self, profile: Dict) -> ConvertedProfile:
- language = profile.get('language', 'any')
-
- # Handle language processing for advanced mode (with behavior_language format)
- if language != 'any' and '_' in language:
- language_parts = language.split('_', 1)
- behaviour, language_code = language_parts
-
- # Check if we're using a special importer with cached formats
- if self.format_importer and hasattr(self.format_importer, '__name__') and self.format_importer.__name__ == 'cached_format_importer':
- # If we're using the cached importer, skip processing
- # The formats were already added directly to the profile
- pass # Using pre-added language formats
- else:
- # Normal processing path
- try:
- language_formats = self._process_language_formats(
- behaviour, language_code)
- if 'custom_formats' not in profile:
- profile['custom_formats'] = []
- profile['custom_formats'].extend(language_formats)
- except Exception as e:
- logger.error(f"Failed to process language formats: {e}")
-
- # Simple mode: just use the language directly without custom formats
- # This lets the Arr application's built-in language filter handle it
-
- # Get the appropriate language data for the profile
- if language != 'any' and '_' not in language:
- # Simple mode - use the language directly
- selected_language = ValueResolver.get_language(language,
- self.target_app,
- for_profile=True)
- # Using simple language mode
- else:
- # Advanced mode or 'any' - set language to 'any' as filtering is done via formats
- selected_language = ValueResolver.get_language('any',
- self.target_app,
- for_profile=True)
- # Using advanced mode, setting language to 'any'
-
- converted_profile = ConvertedProfile(
- name=profile["name"],
- upgrade_allowed=profile.get("upgradesAllowed", True),
- items=[],
- format_items=[],
- min_format_score=profile.get("minCustomFormatScore", 0),
- cutoff_format_score=profile.get("upgradeUntilScore", 0),
- min_upgrade_format_score=max(1,
- profile.get("minScoreIncrement", 1)),
- language=selected_language)
-
- used_qualities = set()
- quality_ids_in_groups = set()
-
- # First pass: Gather all quality IDs in groups to avoid duplicates
- for quality_entry in profile.get("qualities", []):
- if quality_entry.get("id", 0) < 0: # It's a group
- # Process this group to collect quality IDs
- converted_group = self.convert_quality_group(quality_entry)
- for item in converted_group["items"]:
- if "quality" in item and "id" in item["quality"]:
- quality_ids_in_groups.add(item["quality"]["id"])
-
- # Second pass: Add groups and individual qualities to the profile
- for quality_entry in profile.get("qualities", []):
- if quality_entry.get("id", 0) < 0: # It's a group
- converted_group = self.convert_quality_group(quality_entry)
- if converted_group["items"]:
- converted_profile.items.append(converted_group)
- for q in quality_entry.get("qualities", []):
- used_qualities.add(q.get("name", "").upper())
- else: # It's a single quality
- quality_name = quality_entry.get("name")
- mapped_name = ValueResolver.get_quality_name(
- quality_name, self.target_app)
- if mapped_name in self.quality_mappings:
- converted_profile.items.append({
- "quality": self.quality_mappings[mapped_name],
- "items": [],
- "allowed": True
- })
- used_qualities.add(mapped_name.upper())
-
- # Add all unused qualities as disabled, but skip those already in groups
- for quality_name, quality_data in self.quality_mappings.items():
- if (quality_name.upper() not in used_qualities and
- quality_data["id"] not in quality_ids_in_groups):
- converted_profile.items.append({
- "quality": quality_data,
- "items": [],
- "allowed": False
- })
-
- if "upgrade_until" in profile and "id" in profile["upgrade_until"]:
- cutoff_id = profile["upgrade_until"]["id"]
- cutoff_name = profile["upgrade_until"]["name"]
-
- mapped_cutoff_name = ValueResolver.get_quality_name(
- cutoff_name, self.target_app)
-
- if cutoff_id < 0:
- converted_profile.cutoff = self._convert_group_id(cutoff_id)
- else:
- converted_profile.cutoff = self.quality_mappings[
- mapped_cutoff_name]["id"]
-
- for cf in profile.get("custom_formats", []):
- format_item = {"name": cf["name"], "score": cf["score"]}
- converted_profile.format_items.append(format_item)
-
- # Process app-specific custom formats based on target app
- app_specific_field = None
- if self.target_app == TargetApp.RADARR:
- app_specific_field = "custom_formats_radarr"
- elif self.target_app == TargetApp.SONARR:
- app_specific_field = "custom_formats_sonarr"
-
- if app_specific_field and app_specific_field in profile:
- for cf in profile[app_specific_field]:
- format_name = cf["name"]
- # Apply [Dictionarry] suffix if import_as_unique is enabled
- if self.import_as_unique:
- format_name = f"{format_name} [Dictionarry]"
- format_item = {"name": format_name, "score": cf["score"]}
- converted_profile.format_items.append(format_item)
-
- converted_profile.items.reverse()
-
- return converted_profile
-
-
-class ProfileProcessor:
- """Main class for processing profile files"""
-
- def __init__(self,
- input_dir: Path,
- output_dir: Path,
- target_app: TargetApp,
- base_url: str = None,
- api_key: str = None,
- format_importer: Callable = None):
- self.input_dir = input_dir
- self.output_dir = output_dir
- self.converter = ProfileConverter(target_app, base_url, api_key,
- format_importer)
-
- def _load_profile(self, profile_name: str) -> Optional[Dict]:
- profile_path = self.input_dir / f"{profile_name}.yml"
- if not profile_path.exists():
- return None
- with profile_path.open('r') as f:
- return yaml.safe_load(f)
-
- def process_profile(
- self,
- profile_name: str,
- return_data: bool = False) -> Optional[ConvertedProfile]:
- profile_data = self._load_profile(profile_name)
- if not profile_data:
- return None
-
- converted = self.converter.convert_profile(profile_data)
- if return_data:
- return converted
-
- output_data = [{
- 'name': converted.name,
- 'upgradeAllowed': converted.upgrade_allowed,
- 'items': converted.items,
- 'formatItems': converted.format_items,
- 'minFormatScore': converted.min_format_score,
- 'cutoffFormatScore': converted.cutoff_format_score,
- 'minUpgradeFormatScore': converted.min_upgrade_format_score,
- 'language': converted.language
- }]
-
- if converted.cutoff is not None:
- output_data[0]['cutoff'] = converted.cutoff
-
- output_path = self.output_dir / f"{profile_name}.json"
- with output_path.open('w') as f:
- json.dump(output_data, f, indent=2)
-
- return converted
-
-
-def compile_quality_profile(profile_data: Dict,
- target_app: TargetApp,
- base_url: str = None,
- api_key: str = None,
- format_importer: Callable = None,
- import_as_unique: bool = False) -> List[Dict]:
- converter = ProfileConverter(target_app,
- base_url,
- api_key,
- format_importer,
- import_as_unique=import_as_unique)
- converted = converter.convert_profile(profile_data)
-
- output = {
- 'name': converted.name,
- 'upgradeAllowed': converted.upgrade_allowed,
- 'items': converted.items,
- 'formatItems': converted.format_items,
- 'minFormatScore': converted.min_format_score,
- 'cutoffFormatScore': converted.cutoff_format_score,
- 'minUpgradeFormatScore': converted.min_upgrade_format_score,
- 'language': converted.language
- }
-
- if converted.cutoff is not None:
- output['cutoff'] = converted.cutoff
-
- return [output]
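The compiled payload above is what ultimately gets sent to Radarr/Sonarr as a quality profile. As a rough, illustrative sketch of its shape (the profile name and score values here are hypothetical placeholders, not taken from a real Dictionarry profile):

    payload = compile_quality_profile(profile_data, TargetApp.RADARR)
    # -> a single-element list, roughly:
    # [{
    #   "name": "Example 1080p",                 # hypothetical
    #   "upgradeAllowed": True,
    #   "items": [...],                          # converted quality groups and individual qualities
    #   "formatItems": [{"name": "...", "score": 0}, ...],
    #   "minFormatScore": 0,
    #   "cutoffFormatScore": 0,
    #   "minUpgradeFormatScore": 1,
    #   "language": {...},
    #   "cutoff": 9                              # only present when the profile sets an upgrade_until target
    # }]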
diff --git a/backend/app/config/__init__.py b/backend/app/config/__init__.py
deleted file mode 100644
index 2e04011..0000000
--- a/backend/app/config/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-from .config import config
-
-__all__ = ['config']
diff --git a/backend/app/config/config.py b/backend/app/config/config.py
deleted file mode 100644
index 6d70114..0000000
--- a/backend/app/config/config.py
+++ /dev/null
@@ -1,56 +0,0 @@
-import os
-import logging
-
-
-class Config:
- # Base Paths
- CONFIG_DIR = '/config'
- DB_PATH = os.path.join(CONFIG_DIR, 'profilarr.db')
- DB_DIR = os.path.join(CONFIG_DIR, 'db')
-
- REGEX_DIR = os.path.join(DB_DIR, 'regex_patterns')
- FORMAT_DIR = os.path.join(DB_DIR, 'custom_formats')
- PROFILE_DIR = os.path.join(DB_DIR, 'profiles')
- MEDIA_MANAGEMENT_DIR = os.path.join(DB_DIR, 'media_management')
-
- # Logging
- LOG_DIR = os.path.join(CONFIG_DIR, 'log')
- GENERAL_LOG_FILE = os.path.join(LOG_DIR, 'profilarr.log')
- IMPORTARR_LOG_FILE = os.path.join(LOG_DIR, 'importarr.log')
- HASH_LOG_FILE = os.path.join(LOG_DIR, 'hash.log')
-
- # Flask Configuration
- FLASK_ENV = os.getenv('FLASK_ENV', 'production')
- DEBUG = FLASK_ENV == 'development'
-
- # CORS Configuration
- CORS_ORIGINS = "*"
-
- # Session Configuration
- SESSION_LIFETIME_DAYS = 30
- SESSION_COOKIE_SECURE = False
- SESSION_COOKIE_HTTPONLY = True
- SESSION_COOKIE_SAMESITE = 'Lax'
-
- # Git Configuration
- GIT_USER_NAME = os.getenv('GIT_USER_NAME')
- GIT_USER_EMAIL = os.getenv('GIT_USER_EMAIL')
-
- @staticmethod
- def ensure_directories():
- """Create all required directories if they don't exist."""
- directories = [
- Config.CONFIG_DIR, Config.DB_DIR, Config.REGEX_DIR,
- Config.FORMAT_DIR, Config.PROFILE_DIR, Config.MEDIA_MANAGEMENT_DIR, Config.LOG_DIR
- ]
- logger = logging.getLogger(__name__)
- for directory in directories:
- try:
- os.makedirs(directory, exist_ok=True)
- logger.info(f"Ensured directory exists: {directory}")
- except Exception as e:
- logger.error(
- f"Failed to create directory {directory}: {str(e)}")
-
-
-config = Config()
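Everything above is rooted at the container's /config volume, so the on-disk layout implied by these constants is roughly:

    /config
    ├── profilarr.db
    ├── log/
    │   ├── profilarr.log
    │   ├── importarr.log
    │   └── hash.log
    └── db/
        ├── regex_patterns/
        ├── custom_formats/
        ├── profiles/
        └── media_management/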
diff --git a/backend/app/db/__init__.py b/backend/app/db/__init__.py
deleted file mode 100644
index a67f20d..0000000
--- a/backend/app/db/__init__.py
+++ /dev/null
@@ -1,15 +0,0 @@
-from .connection import get_db
-from .queries.settings import get_settings, get_secret_key, save_settings, update_pat_status
-from .queries.arr import (get_unique_arrs, update_arr_config_on_rename,
- update_arr_config_on_delete)
-from .queries.format_renames import (add_format_to_renames,
- remove_format_from_renames,
- is_format_in_renames)
-from .migrations.runner import run_migrations
-
-__all__ = [
- 'get_db', 'get_settings', 'get_secret_key', 'save_settings',
- 'get_unique_arrs', 'update_arr_config_on_rename',
- 'update_arr_config_on_delete', 'run_migrations', 'add_format_to_renames',
- 'remove_format_from_renames', 'is_format_in_renames', 'update_pat_status'
-]
diff --git a/backend/app/db/connection.py b/backend/app/db/connection.py
deleted file mode 100644
index a6a9998..0000000
--- a/backend/app/db/connection.py
+++ /dev/null
@@ -1,12 +0,0 @@
-# backend/app/db/connection.py
-import sqlite3
-from ..config import config
-
-DB_PATH = config.DB_PATH
-
-
-def get_db():
- """Create and return a database connection with Row factory."""
- conn = sqlite3.connect(DB_PATH)
- conn.row_factory = sqlite3.Row
- return conn
diff --git a/backend/app/db/migrations/__init__.py b/backend/app/db/migrations/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/backend/app/db/migrations/runner.py b/backend/app/db/migrations/runner.py
deleted file mode 100644
index 11bebfe..0000000
--- a/backend/app/db/migrations/runner.py
+++ /dev/null
@@ -1,64 +0,0 @@
-# backend/app/db/migrations/runner.py
-import os
-import importlib
-from pathlib import Path
-from ..connection import get_db
-
-
-def init_migrations():
- """Create migrations table if it doesn't exist."""
- with get_db() as conn:
- conn.execute('''
- CREATE TABLE IF NOT EXISTS migrations (
- id INTEGER PRIMARY KEY AUTOINCREMENT,
- version INTEGER NOT NULL,
- name TEXT NOT NULL,
- applied_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
- )
- ''')
- conn.commit()
-
-
-def get_applied_migrations():
- """Get list of already applied migrations."""
- with get_db() as conn:
- result = conn.execute(
- 'SELECT version FROM migrations ORDER BY version')
- return [row[0] for row in result.fetchall()]
-
-
-def get_available_migrations():
- """Get all migration files from versions directory."""
- versions_dir = Path(__file__).parent / 'versions'
- migrations = []
-
- for file in versions_dir.glob('[0-9]*.py'):
- if file.stem != '__init__':
- # Import the migration module
- module = importlib.import_module(f'.versions.{file.stem}',
- package='app.db.migrations')
- migrations.append((module.version, module.name, module))
-
- return sorted(migrations, key=lambda x: x[0])
-
-
-def run_migrations():
- """Run all pending migrations in order."""
- init_migrations()
- applied = set(get_applied_migrations())
- available = get_available_migrations()
-
- for version, name, module in available:
- if version not in applied:
- print(f"Applying migration {version}: {name}")
- try:
- module.up()
- with get_db() as conn:
- conn.execute(
- 'INSERT INTO migrations (version, name) VALUES (?, ?)',
- (version, name))
- conn.commit()
- print(f"Successfully applied migration {version}")
- except Exception as e:
- print(f"Error applying migration {version}: {str(e)}")
- raise
diff --git a/backend/app/db/migrations/versions/001_initial_schema.py b/backend/app/db/migrations/versions/001_initial_schema.py
deleted file mode 100644
index e764786..0000000
--- a/backend/app/db/migrations/versions/001_initial_schema.py
+++ /dev/null
@@ -1,145 +0,0 @@
-# backend/app/db/migrations/versions/001_initial_schema.py
-import os
-import secrets
-from ...connection import get_db
-
-version = 1
-name = "initial_schema"
-
-
-def up():
- """Apply the initial database schema."""
- with get_db() as conn:
- # Create backups table
- conn.execute('''
- CREATE TABLE IF NOT EXISTS backups (
- id INTEGER PRIMARY KEY AUTOINCREMENT,
- filename TEXT NOT NULL,
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
- status TEXT DEFAULT 'pending'
- )
- ''')
-
- # Create arr_config table
- conn.execute('''
- CREATE TABLE IF NOT EXISTS arr_config (
- id INTEGER PRIMARY KEY AUTOINCREMENT,
- name TEXT UNIQUE NOT NULL,
- type TEXT NOT NULL,
- tags TEXT,
- arr_server TEXT NOT NULL,
- api_key TEXT NOT NULL,
- data_to_sync TEXT,
- last_sync_time TIMESTAMP,
- sync_percentage INTEGER DEFAULT 0,
- sync_method TEXT DEFAULT 'manual',
- sync_interval INTEGER DEFAULT 0,
- import_as_unique BOOLEAN DEFAULT 0,
- import_task_id INTEGER DEFAULT NULL,
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
- updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
- )
- ''')
-
- # Create scheduled_tasks table
- conn.execute('''
- CREATE TABLE IF NOT EXISTS scheduled_tasks (
- id INTEGER PRIMARY KEY AUTOINCREMENT,
- name TEXT NOT NULL,
- type TEXT NOT NULL,
- interval_minutes INTEGER NOT NULL,
- last_run TIMESTAMP,
- status TEXT DEFAULT 'pending',
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
- )
- ''')
-
- # Create settings table
- conn.execute('''
- CREATE TABLE IF NOT EXISTS settings (
- id INTEGER PRIMARY KEY AUTOINCREMENT,
- key TEXT UNIQUE NOT NULL,
- value TEXT,
- updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
- )
- ''')
-
- # Create auth table
- conn.execute('''
- CREATE TABLE IF NOT EXISTS auth (
- username TEXT NOT NULL,
- password_hash TEXT NOT NULL,
- api_key TEXT,
- session_id TEXT,
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
- )
- ''')
-
- # Create failed_attempts table
- conn.execute('''
- CREATE TABLE IF NOT EXISTS failed_attempts (
- id INTEGER PRIMARY KEY AUTOINCREMENT,
- ip_address TEXT NOT NULL,
- attempt_time TIMESTAMP DEFAULT CURRENT_TIMESTAMP
- )
- ''')
-
- # Insert initial required data
- required_tasks = [
- ('Repository Sync', 'Sync', 2),
- ('Backup', 'Backup', 1440),
- ]
- for task_name, task_type, interval in required_tasks:
- cursor = conn.execute(
- 'SELECT COUNT(*) FROM scheduled_tasks WHERE name = ?',
- (task_name, ))
- if cursor.fetchone()[0] == 0:
- conn.execute(
- '''
- INSERT INTO scheduled_tasks (name, type, interval_minutes)
- VALUES (?, ?, ?)
- ''', (task_name, task_type, interval))
-
- # Insert initial settings
- conn.execute('''
- INSERT OR IGNORE INTO settings (key, value, updated_at)
- VALUES ('auto_pull_enabled', '0', CURRENT_TIMESTAMP)
- ''')
-
- # Handle profilarr_pat setting
- profilarr_pat = os.environ.get('PROFILARR_PAT')
- conn.execute(
- '''
- INSERT INTO settings (key, value, updated_at)
- VALUES ('has_profilarr_pat', ?, CURRENT_TIMESTAMP)
- ON CONFLICT(key) DO UPDATE SET
- value = ?,
- updated_at = CURRENT_TIMESTAMP
- ''', (str(bool(profilarr_pat)).lower(), str(
- bool(profilarr_pat)).lower()))
-
- # Handle secret_key setting
- secret_key = conn.execute(
- 'SELECT value FROM settings WHERE key = "secret_key"').fetchone()
- if not secret_key:
- new_secret_key = secrets.token_hex(32)
- conn.execute(
- '''
- INSERT INTO settings (key, value, updated_at)
- VALUES ('secret_key', ?, CURRENT_TIMESTAMP)
- ''', (new_secret_key, ))
-
- conn.commit()
-
-
-def down():
- """Revert the initial schema migration."""
- with get_db() as conn:
- # Drop all tables in reverse order of creation
- tables = [
- 'failed_attempts', 'auth', 'settings', 'scheduled_tasks',
- 'arr_config', 'backups'
- ]
- for table in tables:
- conn.execute(f'DROP TABLE IF EXISTS {table}')
- conn.commit()
diff --git a/backend/app/db/migrations/versions/002_format_renames.py b/backend/app/db/migrations/versions/002_format_renames.py
deleted file mode 100644
index 9d0d472..0000000
--- a/backend/app/db/migrations/versions/002_format_renames.py
+++ /dev/null
@@ -1,23 +0,0 @@
-# backend/app/db/migrations/versions/002_format_renames.py
-from ...connection import get_db
-
-version = 2
-name = "format_renames"
-
-
-def up():
- """Add table for tracking which formats to include in renames"""
- with get_db() as conn:
- conn.execute('''
- CREATE TABLE IF NOT EXISTS format_renames (
- format_name TEXT PRIMARY KEY NOT NULL
- )
- ''')
- conn.commit()
-
-
-def down():
- """Remove the format_renames table"""
- with get_db() as conn:
- conn.execute('DROP TABLE IF EXISTS format_renames')
- conn.commit()
diff --git a/backend/app/db/migrations/versions/003_language_import_score.py b/backend/app/db/migrations/versions/003_language_import_score.py
deleted file mode 100644
index 7bb9022..0000000
--- a/backend/app/db/migrations/versions/003_language_import_score.py
+++ /dev/null
@@ -1,33 +0,0 @@
-# backend/app/db/migrations/versions/003_language_import_score.py
-from ...connection import get_db
-
-version = 3
-name = "language_import_score"
-
-
-def up():
- """Add language_import_config table."""
- with get_db() as conn:
- # Create language_import_config table
- conn.execute('''
- CREATE TABLE IF NOT EXISTS language_import_config (
- id INTEGER PRIMARY KEY AUTOINCREMENT,
- score INTEGER NOT NULL DEFAULT -99999,
- updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
- )
- ''')
-
- # Insert default record
- conn.execute('''
- INSERT INTO language_import_config (score, updated_at)
- VALUES (-99999, CURRENT_TIMESTAMP)
- ''')
-
- conn.commit()
-
-
-def down():
- """Remove language_import_config table."""
- with get_db() as conn:
- conn.execute('DROP TABLE IF EXISTS language_import_config')
- conn.commit()
\ No newline at end of file
diff --git a/backend/app/db/migrations/versions/004_update_language_score_default.py b/backend/app/db/migrations/versions/004_update_language_score_default.py
deleted file mode 100644
index c010870..0000000
--- a/backend/app/db/migrations/versions/004_update_language_score_default.py
+++ /dev/null
@@ -1,33 +0,0 @@
-# backend/app/db/migrations/versions/004_update_language_score_default.py
-from ...connection import get_db
-
-version = 4
-name = "update_language_score_default"
-
-
-def up():
- """Update default language import score to -999999."""
- with get_db() as conn:
- # Update existing record to new default value
- conn.execute('''
- UPDATE language_import_config
- SET score = -999999,
- updated_at = CURRENT_TIMESTAMP
- WHERE id = 1
- ''')
-
- conn.commit()
-
-
-def down():
- """Revert language import score to previous default."""
- with get_db() as conn:
- # Revert to previous default value
- conn.execute('''
- UPDATE language_import_config
- SET score = -99999,
- updated_at = CURRENT_TIMESTAMP
- WHERE id = 1
- ''')
-
- conn.commit()
\ No newline at end of file
diff --git a/backend/app/db/migrations/versions/__init__.py b/backend/app/db/migrations/versions/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/backend/app/db/queries/__init__.py b/backend/app/db/queries/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/backend/app/db/queries/arr.py b/backend/app/db/queries/arr.py
deleted file mode 100644
index 592bc02..0000000
--- a/backend/app/db/queries/arr.py
+++ /dev/null
@@ -1,119 +0,0 @@
-from ..connection import get_db
-import json
-import logging
-
-logger = logging.getLogger(__name__)
-
-
-def get_unique_arrs(arr_ids):
- """
- Get import_as_unique settings for a list of arr IDs.
- Args:
- arr_ids (list): List of arr configuration IDs
- Returns:
- dict: Dictionary mapping arr IDs to their import_as_unique settings and names
- """
- if not arr_ids:
- return {}
-
- with get_db() as conn:
- placeholders = ','.join('?' * len(arr_ids))
- query = f'''
- SELECT id, name, import_as_unique
- FROM arr_config
- WHERE id IN ({placeholders})
- '''
- results = conn.execute(query, arr_ids).fetchall()
-
- return {
- row['id']: {
- 'import_as_unique': bool(row['import_as_unique']),
- 'name': row['name']
- }
- for row in results
- }
-
-
-def update_arr_config_on_rename(category, old_name, new_name):
- """
- Update arr_config data_to_sync when a format or profile is renamed.
- Args:
- category (str): Either 'customFormats' or 'profiles'
- old_name (str): Original name being changed
- new_name (str): New name to change to
- Returns:
- list: IDs of arr_config rows that were updated
- """
- updated_ids = []
-
- with get_db() as conn:
- # Get all configs that might reference this name
- rows = conn.execute(
- 'SELECT id, data_to_sync FROM arr_config WHERE data_to_sync IS NOT NULL'
- ).fetchall()
-
- for row in rows:
- try:
- data = json.loads(row['data_to_sync'])
- # Check if this config has the relevant category data
- if category in data:
- # Update any matching names
- if old_name in data[category]:
- # Replace old name with new name
- data[category] = [
- new_name if x == old_name else x
- for x in data[category]
- ]
- # Save changes back to database
- conn.execute(
- 'UPDATE arr_config SET data_to_sync = ? WHERE id = ?',
- (json.dumps(data), row['id']))
- updated_ids.append(row['id'])
- except json.JSONDecodeError:
- logger.error(f"Invalid JSON in arr_config id={row['id']}")
- continue
-
- if updated_ids:
- conn.commit()
-
- return updated_ids
-
-
-def update_arr_config_on_delete(category, name):
- """
- Update arr_config data_to_sync when a format or profile is deleted.
- Args:
- category (str): Either 'customFormats' or 'profiles'
- name (str): Name being deleted
- Returns:
- list: IDs of arr_config rows that were updated
- """
- updated_ids = []
-
- with get_db() as conn:
- # Get all configs that might reference this name
- rows = conn.execute(
- 'SELECT id, data_to_sync FROM arr_config WHERE data_to_sync IS NOT NULL'
- ).fetchall()
-
- for row in rows:
- try:
- data = json.loads(row['data_to_sync'])
- # Check if this config has the relevant category data
- if category in data:
- # Remove any matching names
- if name in data[category]:
- data[category].remove(name)
- # Save changes back to database
- conn.execute(
- 'UPDATE arr_config SET data_to_sync = ? WHERE id = ?',
- (json.dumps(data), row['id']))
- updated_ids.append(row['id'])
- except json.JSONDecodeError:
- logger.error(f"Invalid JSON in arr_config id={row['id']}")
- continue
-
- if updated_ids:
- conn.commit()
-
- return updated_ids
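Both helpers treat data_to_sync as a JSON object keyed by category. A hedged sketch of the shape they expect (the list entries are hypothetical examples, not real format or profile names):

    data_to_sync = json.dumps({
        "customFormats": ["Example Format A", "Example Format B"],
        "profiles": ["Example Profile"]
    })
    # update_arr_config_on_rename("customFormats", "Example Format B", "Example Format C")
    # rewrites that category's list in place for every matching row;
    # update_arr_config_on_delete removes the entry instead.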
diff --git a/backend/app/db/queries/format_renames.py b/backend/app/db/queries/format_renames.py
deleted file mode 100644
index 7ef018a..0000000
--- a/backend/app/db/queries/format_renames.py
+++ /dev/null
@@ -1,33 +0,0 @@
-# backend/app/db/queries/format_renames.py
-import logging
-from ..connection import get_db
-
-logger = logging.getLogger(__name__)
-
-
-def add_format_to_renames(format_name: str) -> None:
- """Add a format to the renames table"""
- with get_db() as conn:
- conn.execute(
- 'INSERT OR REPLACE INTO format_renames (format_name) VALUES (?)',
- (format_name, ))
- conn.commit()
- logger.info(f"Added format to renames table: {format_name}")
-
-
-def remove_format_from_renames(format_name: str) -> None:
- """Remove a format from the renames table"""
- with get_db() as conn:
- conn.execute('DELETE FROM format_renames WHERE format_name = ?',
- (format_name, ))
- conn.commit()
- logger.info(f"Removed format from renames table: {format_name}")
-
-
-def is_format_in_renames(format_name: str) -> bool:
- """Check if a format is in the renames table"""
- with get_db() as conn:
- result = conn.execute(
- 'SELECT 1 FROM format_renames WHERE format_name = ?',
- (format_name, )).fetchone()
- return bool(result)
diff --git a/backend/app/db/queries/settings.py b/backend/app/db/queries/settings.py
deleted file mode 100644
index 648fa2c..0000000
--- a/backend/app/db/queries/settings.py
+++ /dev/null
@@ -1,111 +0,0 @@
-# backend/app/db/queries/settings.py
-from ..connection import get_db
-import logging
-import os
-
-logger = logging.getLogger(__name__)
-
-
-def get_settings():
- with get_db() as conn:
- result = conn.execute(
- 'SELECT key, value FROM settings WHERE key NOT IN ("secret_key")'
- ).fetchall()
- settings = {row['key']: row['value'] for row in result}
- return settings if 'gitRepo' in settings else None
-
-
-def get_secret_key():
- with get_db() as conn:
- result = conn.execute(
- 'SELECT value FROM settings WHERE key = "secret_key"').fetchone()
- return result['value'] if result else None
-
-
-def save_settings(settings_dict):
- with get_db() as conn:
- for key, value in settings_dict.items():
- conn.execute(
- '''
- INSERT INTO settings (key, value, updated_at)
- VALUES (?, ?, CURRENT_TIMESTAMP)
- ON CONFLICT(key) DO UPDATE SET
- value = excluded.value,
- updated_at = CURRENT_TIMESTAMP
- ''', (key, value))
- conn.commit()
-
-
-def update_pat_status():
- """Update the has_profilarr_pat setting based on current environment."""
- with get_db() as conn:
- profilarr_pat = os.environ.get('PROFILARR_PAT')
- pat_exists = str(bool(profilarr_pat)).lower()
-
- # Get current value
- current = conn.execute('SELECT value FROM settings WHERE key = ?',
- ('has_profilarr_pat', )).fetchone()
-
- conn.execute(
- '''
- INSERT INTO settings (key, value, updated_at)
- VALUES ('has_profilarr_pat', ?, CURRENT_TIMESTAMP)
- ON CONFLICT(key) DO UPDATE SET
- value = ?,
- updated_at = CURRENT_TIMESTAMP
- ''', (pat_exists, pat_exists))
- conn.commit()
-
- if current is None:
- logger.info(f"PAT status created: {pat_exists}")
- elif current[0] != pat_exists:
- logger.info(
- f"PAT status updated from {current[0]} to {pat_exists}")
- else:
- logger.debug("PAT status unchanged")
-
-
-def get_language_import_score():
- """Get the current language import score."""
- with get_db() as conn:
- result = conn.execute(
- 'SELECT score FROM language_import_config ORDER BY id DESC LIMIT 1'
- ).fetchone()
- return result['score'] if result else -99999
-
-
-def update_language_import_score(score):
- """Update the language import score."""
- with get_db() as conn:
- # Get current score first
- current = conn.execute(
- 'SELECT score FROM language_import_config ORDER BY id DESC LIMIT 1'
- ).fetchone()
- current_score = current['score'] if current else None
-
- # Check if record exists
- existing = conn.execute(
- 'SELECT id FROM language_import_config ORDER BY id DESC LIMIT 1'
- ).fetchone()
-
- if existing:
- # Update existing record
- conn.execute(
- '''
- UPDATE language_import_config
- SET score = ?, updated_at = CURRENT_TIMESTAMP
- WHERE id = ?
- ''', (score, existing['id']))
- else:
- # Insert new record
- conn.execute(
- '''
- INSERT INTO language_import_config (score, updated_at)
- VALUES (?, CURRENT_TIMESTAMP)
- ''', (score,))
-
- conn.commit()
- if current_score is not None:
- logger.info(f"Language import score updated from {current_score} to {score}")
- else:
- logger.info(f"Language import score set to: {score}")
diff --git a/backend/app/git/__init__.py b/backend/app/git/__init__.py
deleted file mode 100644
index 9304ea6..0000000
--- a/backend/app/git/__init__.py
+++ /dev/null
@@ -1,403 +0,0 @@
-# git/__init__.py
-from flask import Blueprint, request, jsonify
-from .status.status import get_git_status
-from .status.commit_history import get_git_commit_history
-from .branches.manager import Branch_Manager
-from .operations.manager import GitOperations
-from .repo.unlink import unlink_repository
-from .repo.clone import clone_repository
-from ..db import save_settings, get_settings
-from ..config.config import config
-import logging
-
-logger = logging.getLogger(__name__)
-logger.setLevel(logging.DEBUG)
-
-bp = Blueprint('git', __name__)
-
-REPO_PATH = config.DB_DIR
-branch_manager = Branch_Manager(REPO_PATH)
-git_operations = GitOperations(REPO_PATH)
-
-
-@bp.route('/clone', methods=['POST'])
-def handle_clone_repository():
- try:
- new_settings = request.json
- logger.info(f"Received new settings: {new_settings}")
-
- if 'gitRepo' not in new_settings:
- logger.error("Missing required field: gitRepo")
- return jsonify({"error": "Missing required field: gitRepo"}), 400
-
- success, message = clone_repository(new_settings['gitRepo'], REPO_PATH)
-
- if success:
- # Store repository URL in database
- save_settings({'gitRepo': new_settings['gitRepo']})
- logger.info("Settings updated and repository cloned successfully")
- return jsonify({
- "message":
- "Repository cloned and settings updated successfully"
- }), 200
- else:
- logger.error(f"Failed to clone repository: {message}")
- return jsonify({"error": message}), 400
-
- except Exception as e:
- logger.exception("Unexpected error in clone_repository")
- return jsonify({"error": f"Failed to clone repository: {str(e)}"}), 500
-
-
-@bp.route('/status', methods=['GET'])
-def get_status():
- logger.debug("Received request for git status")
- success, message = get_git_status(REPO_PATH)
-
- if isinstance(message, str) and "No git repository" in message:
- return jsonify({'success': True, 'data': None}), 200
-
- if success:
- logger.debug("Successfully retrieved git status")
- return jsonify({'success': True, 'data': message}), 200
- else:
- logger.error(f"Failed to retrieve git status: {message}")
- return jsonify({'success': False, 'error': message}), 400
-
-
-@bp.route('/branch', methods=['POST'])
-def create_branch():
- branch_name = request.json.get('name')
- base_branch = request.json.get('base', 'main')
- logger.debug(
- f"Received request to create branch {branch_name} from {base_branch}")
- success, result = branch_manager.create(branch_name, base_branch)
- if success:
- logger.debug(f"Successfully created branch: {branch_name}")
- return jsonify({'success': True, **result}), 200
- else:
- logger.error(f"Failed to create branch: {result}")
- if 'merging' in result.get('error', '').lower():
- return jsonify({'success': False, 'error': result}), 409
- return jsonify({'success': False, 'error': result}), 400
-
-
-@bp.route('/branches', methods=['GET'])
-def get_branches():
- logger.debug("Received request for branches")
- success, result = branch_manager.get_all()
- if success:
- logger.debug("Successfully retrieved branches")
- return jsonify({'success': True, 'data': result}), 200
- else:
- logger.error(f"Failed to retrieve branches: {result}")
- return jsonify({'success': False, 'error': result}), 400
-
-
-@bp.route('/checkout', methods=['POST'])
-def checkout_branch():
- branch_name = request.json.get('branch')
- logger.debug(f"Received request to checkout branch: {branch_name}")
- success, result = branch_manager.checkout(branch_name)
- if success:
- logger.debug(f"Successfully checked out branch: {branch_name}")
- return jsonify({'success': True, **result}), 200
- else:
- logger.error(f"Failed to checkout branch: {result}")
- if 'merging' in result.get('error', '').lower():
- return jsonify({'success': False, 'error': result}), 409
- return jsonify({'success': False, 'error': result}), 400
-
-
-@bp.route('/branch/<branch_name>', methods=['DELETE'])

-def delete_branch(branch_name):
- logger.debug(f"Received request to delete branch: {branch_name}")
- success, result = branch_manager.delete(branch_name)
- if success:
- logger.debug(f"Successfully deleted branch: {branch_name}")
- return jsonify({'success': True, **result}), 200
- else:
- logger.error(f"Failed to delete branch: {result}")
- if 'merging' in result.get('error', '').lower():
- return jsonify({'success': False, 'error': result}), 409
- return jsonify({'success': False, 'error': result}), 400
-
-
-@bp.route('/branch/push', methods=['POST'])
-def push_branch():
- data = request.json
- logger.debug(f"Received request to push branch: {data}")
- branch_name = data.get('branch')
- if not branch_name:
- return jsonify({
- "success": False,
- "error": "Branch name is required"
- }), 400
-
- success, result = branch_manager.push(branch_name)
- if success:
- return jsonify({"success": True, "message": result}), 200
- else:
- if isinstance(result, str):
- return jsonify({"success": False, "error": result}), 400
- return jsonify({
- "success": False,
- "error": result.get('error', 'Unknown error occurred')
- }), 400
-
-
-@bp.route('/commit', methods=['POST'])
-def commit_files():
- files = request.json.get('files', [])
- user_commit_message = request.json.get('commit_message', "Commit changes")
- logger.debug(f"Received request to commit files: {files}")
-
- commit_message = generate_commit_message(user_commit_message, files)
- success, message = git_operations.commit(files, commit_message)
-
- if success:
- logger.debug("Successfully committed files")
- return jsonify({'success': True, 'message': message}), 200
- else:
- logger.error(f"Error committing files: {message}")
- return jsonify({'success': False, 'error': message}), 400
-
-
-@bp.route('/push', methods=['POST'])
-def push_files():
- logger.debug("Received request to push changes")
- success, message = git_operations.push()
-
- if success:
- logger.debug("Successfully pushed changes")
- return jsonify({'success': True, 'message': message}), 200
- else:
- logger.error(f"Error pushing changes: {message}")
- return jsonify({'success': False, 'error': message}), 400
-
-
-@bp.route('/revert', methods=['POST'])
-def revert_file():
- file_path = request.json.get('file_path')
- if not file_path:
- return jsonify({
- 'success': False,
- 'error': "File path is required."
- }), 400
- success, message = git_operations.revert(file_path)
- if success:
- return jsonify({'success': True, 'message': message}), 200
- else:
- logger.error(f"Error reverting file: {message}")
- return jsonify({'success': False, 'error': message}), 400
-
-
-@bp.route('/revert-all', methods=['POST'])
-def revert_all():
- success, message = git_operations.revert_all()
- if success:
- return jsonify({'success': True, 'message': message}), 200
- else:
- logger.error(f"Error reverting all changes: {message}")
- return jsonify({'success': False, 'error': message}), 400
-
-
-@bp.route('/file', methods=['DELETE'])
-def delete_file():
- file_path = request.json.get('file_path')
- if not file_path:
- return jsonify({
- 'success': False,
- 'error': "File path is required."
- }), 400
- success, message = git_operations.delete(file_path)
- if success:
- return jsonify({'success': True, 'message': message}), 200
- else:
- logger.error(f"Error deleting file: {message}")
- return jsonify({'success': False, 'error': message}), 400
-
-
-@bp.route('/pull', methods=['POST'])
-def pull_branch():
- branch_name = request.json.get('branch')
- success, response = git_operations.pull(branch_name)
-
- # Handle different response types
- if isinstance(response, dict):
- if response.get('state') == 'resolve':
- # Merge conflict is now a success case with state='resolve'
- return jsonify({
- 'success': True,
- 'state': 'resolve',
- 'message': response['message'],
- 'details': response['details']
- }), 200
- elif response.get('state') == 'error':
- # Handle error states
- return jsonify({
- 'success': False,
- 'state': 'error',
- 'message': response['message'],
- 'details': response.get('details', {})
- }), 409 if response.get('type') in [
- 'merge_conflict', 'uncommitted_changes'
- ] else 400
- elif response.get('state') == 'complete':
- # Normal success case
- return jsonify({
- 'success': True,
- 'state': 'complete',
- 'message': response['message'],
- 'details': response.get('details', {})
- }), 200
-
- # Fallback for string responses or unexpected formats
- if success:
- return jsonify({
- 'success': True,
- 'state': 'complete',
- 'message': response
- }), 200
- return jsonify({
- 'success': False,
- 'state': 'error',
- 'message': str(response)
- }), 400
-
-
-@bp.route('/stage', methods=['POST'])
-def handle_stage_files():
- files = request.json.get('files', [])
- success, message = git_operations.stage(files)
- if success:
- return jsonify({'success': True, 'message': message}), 200
- else:
- return jsonify({'success': False, 'error': message}), 400
-
-
-@bp.route('/unstage', methods=['POST'])
-def handle_unstage_files():
- files = request.json.get('files', [])
- success, message = git_operations.unstage(files)
- if success:
- return jsonify({'success': True, 'message': message}), 200
- else:
- return jsonify({'success': False, 'error': message}), 400
-
-
-@bp.route('/unlink', methods=['POST'])
-def unlink():
- data = request.get_json()
- remove_files = data.get('removeFiles', False)
- success, message = unlink_repository(REPO_PATH, remove_files)
- if success:
- return jsonify({'success': True, 'message': message}), 200
- else:
- return jsonify({'success': False, 'error': message}), 400
-
-
-def generate_commit_message(user_message, files):
- return user_message
-
-
-@bp.route('/resolve', methods=['POST'])
-def resolve_conflicts():
- logger.debug("Received request to resolve conflicts")
- resolutions = request.json.get('resolutions')
-
- if not resolutions:
- return jsonify({
- 'success': False,
- 'error': "Resolutions are required"
- }), 400
-
- result = git_operations.resolve(resolutions)
-
- if result.get('success'):
- logger.debug("Successfully resolved conflicts")
- return jsonify(result), 200
- else:
- logger.error(f"Error resolving conflicts: {result.get('error')}")
- return jsonify(result), 400
-
-
-@bp.route('/merge/finalize', methods=['POST'])
-def finalize_merge():
- """
- Route to finalize a merge after all conflicts have been resolved.
- Expected to be called only after all conflicts are resolved and changes are staged.
- """
- logger.debug("Received request to finalize merge")
-
- result = git_operations.finalize_merge()
-
- if result.get('success'):
- logger.debug(
- f"Successfully finalized merge with files: {result.get('committed_files', [])}"
- )
- return jsonify({
- 'success': True,
- 'message': result.get('message'),
- 'committed_files': result.get('committed_files', [])
- }), 200
- else:
- logger.error(f"Error finalizing merge: {result.get('error')}")
- return jsonify({'success': False, 'error': result.get('error')}), 400
-
-
-@bp.route('/merge/abort', methods=['POST'])
-def abort_merge():
- logger.debug("Received request to abort merge")
- success, message = git_operations.abort_merge()
- if success:
- logger.debug("Successfully aborted merge")
- return jsonify({'success': True, 'message': message}), 200
- else:
- logger.error(f"Error aborting merge: {message}")
- return jsonify({'success': False, 'error': message}), 400
-
-
-@bp.route('/commits', methods=['GET'])
-def get_commit_history():
- logger.debug("Received request for commit history")
- branch = request.args.get('branch') # Optional branch parameter
- success, result = get_git_commit_history(REPO_PATH, branch)
-
- if success:
- logger.debug("Successfully retrieved commit history")
- return jsonify({'success': True, 'data': result}), 200
- else:
- logger.error(f"Failed to retrieve commit history: {result}")
- return jsonify({'success': False, 'error': result}), 400
-
-
-@bp.route('/autopull', methods=['GET', 'POST'])
-def handle_auto_pull():
- try:
- if request.method == 'GET':
- settings = get_settings()
- return jsonify({
- 'success':
- True,
- 'enabled':
- bool(int(settings.get('auto_pull_enabled', 0)))
- }), 200
-
- # POST handling
- data = request.json
- enabled = data.get('enabled')
- if enabled is None:
- return jsonify({
- 'success': False,
- 'error': 'enabled field is required'
- }), 400
-
- save_settings({'auto_pull_enabled': 1 if enabled else 0})
- logger.info(
- f"Auto-pull has been {'enabled' if enabled else 'disabled'}")
- return jsonify({'success': True}), 200
-
- except Exception as e:
- logger.error(f"Error handling auto pull setting: {str(e)}")
- return jsonify({'success': False, 'error': str(e)}), 500
diff --git a/backend/app/git/auth/authenticate.py b/backend/app/git/auth/authenticate.py
deleted file mode 100644
index 37bf28a..0000000
--- a/backend/app/git/auth/authenticate.py
+++ /dev/null
@@ -1,49 +0,0 @@
-# git/auth/authenticate.py
-import os
-import logging
-
-logger = logging.getLogger(__name__)
-
-
-class GitHubAuth:
- """
- A modular authentication handler for GitHub repositories.
- Supports Personal Access Tokens (PAT) for HTTPS authentication.
- """
-
- @staticmethod
- def get_authenticated_url(https_url):
- """
- Convert an HTTPS URL to include authentication via PAT.
- Ensures the token is not duplicated in the URL.
- """
- token = os.getenv("PROFILARR_PAT")
- if not token:
- raise ValueError(
- "PROFILARR_PAT is not set in environment variables")
-
- # Check if the URL already contains authentication
- if "@" in https_url:
- # Already has some form of authentication, remove it to add our token
- # This handles URLs that might have a token already
- protocol_part, rest = https_url.split("://", 1)
- if "@" in rest:
- # Remove any existing authentication
- _, server_part = rest.split("@", 1)
- https_url = f"{protocol_part}://{server_part}"
-
- # Now add our token
- authenticated_url = https_url.replace("https://", f"https://{token}@")
- return authenticated_url
-
- @staticmethod
- def verify_token():
- """
- Verify if the Personal Access Token is valid.
- """
- token = os.getenv("PROFILARR_PAT")
- if not token:
- logger.error("PROFILARR_PAT is not set")
- return False
- logger.info("Token verification skipped (assume valid)")
- return True
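get_authenticated_url does nothing more than splice the PAT into the HTTPS remote URL, stripping any credentials already embedded in it. A minimal sketch, assuming PROFILARR_PAT is set to a placeholder value:

    os.environ["PROFILARR_PAT"] = "ghp_exampletoken"   # hypothetical token
    GitHubAuth.get_authenticated_url("https://github.com/example/repo.git")
    # -> "https://ghp_exampletoken@github.com/example/repo.git"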
diff --git a/backend/app/git/branches/create.py b/backend/app/git/branches/create.py
deleted file mode 100644
index 9134cda..0000000
--- a/backend/app/git/branches/create.py
+++ /dev/null
@@ -1,24 +0,0 @@
-# git/branches/create.py
-import git
-import logging
-
-logger = logging.getLogger(__name__)
-
-def create_branch(repo_path, branch_name, base_branch='main'):
- try:
- logger.debug(f"Attempting to create branch {branch_name} from {base_branch}")
- repo = git.Repo(repo_path)
-
- # Check if the branch already exists
- if branch_name in repo.heads:
- return False, f"Branch '{branch_name}' already exists."
-
- # Create and checkout the new branch
- new_branch = repo.create_head(branch_name, commit=base_branch)
- new_branch.checkout()
-
- logger.debug(f"Successfully created branch: {branch_name}")
- return True, {"message": f"Created branch: {branch_name}", "current_branch": branch_name}
- except Exception as e:
- logger.error(f"Error creating branch: {str(e)}", exc_info=True)
- return False, {"error": f"Error creating branch: {str(e)}"}
diff --git a/backend/app/git/branches/delete.py b/backend/app/git/branches/delete.py
deleted file mode 100644
index e6190fd..0000000
--- a/backend/app/git/branches/delete.py
+++ /dev/null
@@ -1,46 +0,0 @@
-# git/branches/delete.py
-
-import git
-from git.exc import GitCommandError
-import logging
-
-logger = logging.getLogger(__name__)
-logger.setLevel(logging.DEBUG)
-
-
-def delete_branch(repo_path, branch_name):
- try:
- logger.debug(f"Attempting to delete branch: {branch_name}")
- logger.debug(
- f"Attempting to delete branch from repo at path: {repo_path}")
- repo = git.Repo(repo_path)
-
- # Fetch updates from remote
- logger.debug("Fetching updates from remote...")
- repo.git.fetch('--all')
-
- # Update local repository state
- logger.debug("Updating local repository state...")
- repo.git.remote('update', 'origin', '--prune')
-
- # Check if it's a local branch
- if branch_name in repo.heads:
- logger.debug(f"Deleting local branch: {branch_name}")
- if repo.active_branch.name == branch_name:
- return False, f"Cannot delete the current branch: {branch_name}"
- repo.delete_head(branch_name, force=True)
- logger.debug(f"Local branch {branch_name} deleted")
-
- # Check if remote branch exists
- remote_branch = f"origin/{branch_name}"
- if remote_branch in repo.refs:
- pass
-
- return True, {
- "message": f"Deleted branch: {branch_name}",
- "current_branch": repo.active_branch.name
- }
-
- except Exception as e:
- logger.error(f"Error deleting branch: {str(e)}", exc_info=True)
- return False, {"error": f"Error deleting branch: {str(e)}"}
diff --git a/backend/app/git/branches/get.py b/backend/app/git/branches/get.py
deleted file mode 100644
index 94bd534..0000000
--- a/backend/app/git/branches/get.py
+++ /dev/null
@@ -1,48 +0,0 @@
-import git
-import logging
-from flask import Blueprint, jsonify
-
-logger = logging.getLogger(__name__)
-
-def get_branches(repo_path):
- try:
- logger.debug("Attempting to get branches")
- repo = git.Repo(repo_path)
-
- # Get local branches
- local_branches = [{'name': branch.name, 'isLocal': True, 'isRemote': False} for branch in repo.heads]
- logger.debug(f"Local branches found: {[branch['name'] for branch in local_branches]}")
-
- # Get remote branches
- remote_branches = [{'name': ref.remote_head, 'isLocal': False, 'isRemote': True} for ref in repo.remote().refs if not ref.remote_head == 'HEAD']
- logger.debug(f"Remote branches found: {[branch['name'] for branch in remote_branches]}")
-
- # Combine and update status for branches that are both local and remote
- all_branches = local_branches + remote_branches
- branch_dict = {}
- for branch in all_branches:
- if branch['name'] in branch_dict:
- branch_dict[branch['name']]['isLocal'] = branch_dict[branch['name']]['isLocal'] or branch['isLocal']
- branch_dict[branch['name']]['isRemote'] = branch_dict[branch['name']]['isRemote'] or branch['isRemote']
- else:
- branch_dict[branch['name']] = branch
-
- all_branches = list(branch_dict.values())
-
- logger.debug(f"All branches combined (local and remote): {[branch['name'] for branch in all_branches]}")
- logger.info(f"Branches being sent: {[branch['name'] for branch in all_branches]}")
-
- return True, {"branches": all_branches}
- except Exception as e:
- logger.error(f"Error getting branches: {str(e)}", exc_info=True)
- return False, {"error": f"Error getting branches: {str(e)}"}
-
-def get_current_branch(repo_path):
- try:
- repo = git.Repo(repo_path)
- current_branch = repo.active_branch.name
- logger.debug(f"Current branch: {current_branch}")
- return current_branch
- except Exception as e:
- logger.error(f"Error getting current branch: {str(e)}", exc_info=True)
- return None
diff --git a/backend/app/git/branches/manager.py b/backend/app/git/branches/manager.py
deleted file mode 100644
index 31f959e..0000000
--- a/backend/app/git/branches/manager.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# git/branches/branches.py
-
-import git
-import os
-from .create import create_branch
-from .checkout import checkout_branch
-from .delete import delete_branch
-from .get import get_branches, get_current_branch
-from .push import push_branch_to_remote
-
-
-class Branch_Manager:
-
- def __init__(self, repo_path):
- self.repo_path = repo_path
-
- def is_merging(self):
- repo = git.Repo(self.repo_path)
- return os.path.exists(os.path.join(repo.git_dir, 'MERGE_HEAD'))
-
- def create(self, branch_name, base_branch='main'):
- if self.is_merging():
- return False, {
- 'error':
- 'Cannot create branch while merging. Resolve conflicts first.'
- }
- return create_branch(self.repo_path, branch_name, base_branch)
-
- def checkout(self, branch_name):
- if self.is_merging():
- return False, {
- 'error':
- 'Cannot checkout while merging. Resolve conflicts first.'
- }
- return checkout_branch(self.repo_path, branch_name)
-
- def delete(self, branch_name):
- if self.is_merging():
- return False, {
- 'error':
- 'Cannot delete branch while merging. Resolve conflicts first.'
- }
- return delete_branch(self.repo_path, branch_name)
-
- def get_all(self):
- return get_branches(self.repo_path)
-
- def get_current(self):
- return get_current_branch(self.repo_path)
-
- def push(self, branch_name):
- if self.is_merging():
- return False, {
- 'error': 'Cannot push while merging. Resolve conflicts first.'
- }
- return push_branch_to_remote(self.repo_path, branch_name)
diff --git a/backend/app/git/branches/push.py b/backend/app/git/branches/push.py
deleted file mode 100644
index 212971d..0000000
--- a/backend/app/git/branches/push.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# git/branches/push.py
-import git
-import logging
-from ..auth.authenticate import GitHubAuth
-
-logger = logging.getLogger(__name__)
-logger.setLevel(logging.DEBUG)
-
-
-def _handle_git_error(error):
- """Helper function to process git errors and return user-friendly messages"""
- error_msg = str(error)
- if "403" in error_msg:
- return "Authentication failed: The provided PAT doesn't have sufficient permissions or is invalid."
- elif "401" in error_msg:
- return "Authentication failed: No PAT provided or the token is invalid."
- elif "non-fast-forward" in error_msg:
- return "Push rejected: Remote contains work that you do not have locally. Please pull the latest changes first."
- return f"Git error: {error_msg}"
-
-
-def push_branch_to_remote(repo_path, branch_name):
- try:
- logger.debug(f"Attempting to push branch {branch_name} to remote")
-
- # Verify token before attempting push
- if not GitHubAuth.verify_token():
- return False, "Push operation requires GitHub authentication. Please configure PAT."
-
- repo = git.Repo(repo_path)
-
- # Check if the branch exists locally
- if branch_name not in repo.heads:
- return False, f"Branch '{branch_name}' does not exist locally."
-
- origin = repo.remote(name='origin')
- original_url = origin.url
-
- try:
- # Set authenticated URL
- auth_url = GitHubAuth.get_authenticated_url(original_url)
- origin.set_url(auth_url)
-
- # Push the branch to remote and set the upstream branch
- origin.push(refspec=f"{branch_name}:{branch_name}",
- set_upstream=True)
- return True, f"Pushed branch to remote: {branch_name}"
-
- except git.GitCommandError as e:
- return False, _handle_git_error(e)
-
- finally:
- # Always restore original URL
- origin.set_url(original_url)
-
- except Exception as e:
- logger.error(f"Error pushing branch to remote: {str(e)}",
- exc_info=True)
- return False, str(e)
diff --git a/backend/app/git/operations/commit.py b/backend/app/git/operations/commit.py
deleted file mode 100644
index bfe71c4..0000000
--- a/backend/app/git/operations/commit.py
+++ /dev/null
@@ -1,135 +0,0 @@
-# git/operations/commit.py
-import git
-import os
-import logging
-from ..status.status import GitStatusManager
-
-logger = logging.getLogger(__name__)
-
-
-def parse_git_status(status_output):
- """
- Parse git status --porcelain output into a structured format.
-
- Returns dict with staged and unstaged changes, identifying status of each file.
- """
- changes = {}
- for line in status_output:
- if not line:
- continue
-
- index_status = line[0] # First character: staged status
- worktree_status = line[1] # Second character: unstaged status
- file_path = line[3:]
-
- changes[file_path] = {
- 'staged': index_status != ' ',
- 'staged_status': index_status,
- 'unstaged_status': worktree_status
- }
-
- return changes
-
-
-def commit_changes(repo_path, files, message):
- """
- Commit changes to git repository, optimizing staging operations.
- Only re-stages files if their current staging status is incorrect.
-
- Args:
- repo_path: Path to git repository
- files: List of files to commit, or None/empty for all staged changes
- message: Commit message
-
- Returns:
- tuple: (success: bool, message: str)
- """
- try:
- repo = git.Repo(repo_path)
-
- # If no specific files provided, commit all staged changes
- if not files:
- commit = repo.index.commit(message)
- # Update remote status after commit
- status_manager = GitStatusManager.get_instance(repo_path)
- if status_manager:
- status_manager.update_remote_status()
- return True, "Successfully committed all staged changes."
-
- # Get current status of the repository
- status_output = repo.git.status('--porcelain').splitlines()
- status = parse_git_status(status_output)
-
- # Track files that need staging operations
- to_add = []
- to_remove = []
- already_staged = []
-
- for file_path in files:
- if file_path in status:
- file_status = status[file_path]
-
- # File is already properly staged
- if file_status['staged']:
- if file_status['staged_status'] == 'D':
- already_staged.append(('deleted', file_path))
- else:
- already_staged.append(('modified', file_path))
- continue
-
- # File needs to be staged
- if file_status['unstaged_status'] == 'D':
- to_remove.append(file_path)
- else:
- to_add.append(file_path)
- else:
- logger.warning(f"File not found in git status: {file_path}")
-
- # Perform necessary staging operations
- if to_add:
- logger.debug(f"Staging modified files: {to_add}")
- repo.index.add(to_add)
-
- if to_remove:
- logger.debug(f"Staging deleted files: {to_remove}")
- repo.index.remove(to_remove, working_tree=True)
-
- # Commit the changes
- commit = repo.index.commit(message)
-
- # Update remote status after commit
- status_manager = GitStatusManager.get_instance(repo_path)
- if status_manager:
- status_manager.update_remote_status()
-
- # Build detailed success message
- staged_counts = {
- 'added/modified': len(to_add),
- 'deleted': len(to_remove),
- 'already_staged': len(already_staged)
- }
-
- message_parts = []
- if staged_counts['added/modified']:
- message_parts.append(
- f"{staged_counts['added/modified']} files staged")
- if staged_counts['deleted']:
- message_parts.append(
- f"{staged_counts['deleted']} deletions staged")
- if staged_counts['already_staged']:
- message_parts.append(
- f"{staged_counts['already_staged']} files already staged")
-
- if message_parts:
- details = " and ".join(message_parts)
- return True, f"Successfully committed changes ({details})"
- else:
- return True, "Successfully committed changes (no files needed staging)"
-
- except git.exc.GitCommandError as e:
- logger.error(f"Git command error committing changes: {str(e)}",
- exc_info=True)
- return False, f"Error committing changes: {str(e)}"
- except Exception as e:
- logger.error(f"Error committing changes: {str(e)}", exc_info=True)
- return False, f"Error committing changes: {str(e)}"
diff --git a/backend/app/git/operations/manager.py b/backend/app/git/operations/manager.py
deleted file mode 100644
index fbba869..0000000
--- a/backend/app/git/operations/manager.py
+++ /dev/null
@@ -1,54 +0,0 @@
-import git
-from .stage import stage_files
-from .commit import commit_changes
-from .push import push_changes
-from .revert import revert_file, revert_all
-from .delete import delete_file
-from .pull import pull_branch
-from .unstage import unstage_files
-from .merge import abort_merge, finalize_merge
-from .resolve import resolve_conflicts
-import logging
-
-logger = logging.getLogger(__name__)
-
-
-class GitOperations:
-
- def __init__(self, repo_path):
- self.repo_path = repo_path
-
- def stage(self, files):
- return stage_files(self.repo_path, files)
-
- def unstage(self, files):
- return unstage_files(self.repo_path, files)
-
- def commit(self, files, message):
- return commit_changes(self.repo_path, files, message)
-
- def push(self):
- return push_changes(self.repo_path)
-
- def revert(self, file_path):
- return revert_file(self.repo_path, file_path)
-
- def revert_all(self):
- return revert_all(self.repo_path)
-
- def delete(self, file_path):
- return delete_file(self.repo_path, file_path)
-
- def pull(self, branch_name):
- return pull_branch(self.repo_path, branch_name)
-
- def finalize_merge(self):
- repo = git.Repo(self.repo_path)
- return finalize_merge(repo)
-
- def abort_merge(self):
- return abort_merge(self.repo_path)
-
- def resolve(self, resolutions):
- repo = git.Repo(self.repo_path)
- return resolve_conflicts(repo, resolutions)
diff --git a/backend/app/git/operations/push.py b/backend/app/git/operations/push.py
deleted file mode 100644
index dd4eb3f..0000000
--- a/backend/app/git/operations/push.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# git/operations/push.py
-import git
-import logging
-from ..auth.authenticate import GitHubAuth
-from ..status.status import GitStatusManager
-
-logger = logging.getLogger(__name__)
-
-
-def _handle_git_error(error):
- """Helper function to process git errors and return user-friendly messages"""
- error_msg = str(error)
- if "403" in error_msg:
- return "Authentication failed: The provided PAT doesn't have sufficient permissions or is invalid."
- elif "401" in error_msg:
- return "Authentication failed: No PAT provided or the token is invalid."
- elif "non-fast-forward" in error_msg:
- return "Push rejected: Remote contains work that you do not have locally. Please pull the latest changes first."
- return f"Git error: {error_msg}"
-
-
-def push_changes(repo_path):
- try:
- # Verify token before attempting push
- if not GitHubAuth.verify_token():
- return False, "Push operation requires GitHub authentication. Please configure PAT."
-
- repo = git.Repo(repo_path)
- origin = repo.remote(name='origin')
- original_url = origin.url
-
- try:
- # Set authenticated URL
- auth_url = GitHubAuth.get_authenticated_url(original_url)
- origin.set_url(auth_url)
-
- # Push changes
- push_info = origin.push()
-
- if push_info and push_info[0].flags & push_info[0].ERROR:
- raise git.GitCommandError("git push", push_info[0].summary)
-
- # Update remote status after successful push
- status_manager = GitStatusManager.get_instance(repo_path)
- if status_manager:
- status_manager.update_remote_status()
-
- return True, "Successfully pushed changes."
-
- finally:
- # Always restore original URL
- origin.set_url(original_url)
-
- except git.GitCommandError as e:
- logger.error(f"Git command error during push: {str(e)}")
- return False, _handle_git_error(e)
- except Exception as e:
- logger.error(f"Error pushing changes: {str(e)}", exc_info=True)
- return False, str(e)
diff --git a/backend/app/git/operations/stage.py b/backend/app/git/operations/stage.py
deleted file mode 100644
index 4594524..0000000
--- a/backend/app/git/operations/stage.py
+++ /dev/null
@@ -1,71 +0,0 @@
-# git/operations/stage.py
-import git
-import os
-import logging
-
-logger = logging.getLogger(__name__)
-
-
-def stage_files(repo_path, files):
- """
- Stage files in git repository, properly handling both existing and deleted files.
-
- Args:
- repo_path: Path to git repository
- files: List of files to stage, or None/empty list to stage all changes
-
- Returns:
- tuple: (success: bool, message: str)
- """
- try:
- repo = git.Repo(repo_path)
-
- # Stage all changes if no specific files provided
- if not files:
- repo.git.add(A=True)
- return True, "All changes have been staged."
-
- # Handle specific files
- existing_files = []
- deleted_files = []
-
- # Separate existing and deleted files
- for file_path in files:
- full_path = os.path.join(repo_path, file_path)
- if os.path.exists(full_path):
- existing_files.append(file_path)
- else:
- # Check if file is tracked but deleted
- try:
- repo.git.ls_files(file_path, error_unmatch=True)
- deleted_files.append(file_path)
- except git.exc.GitCommandError:
- logger.warning(f"Untracked file not found: {file_path}")
- continue
-
- # Stage existing files
- if existing_files:
- repo.index.add(existing_files)
-
- # Stage deleted files
- if deleted_files:
- repo.index.remove(deleted_files, working_tree=True)
-
- message_parts = []
- if existing_files:
- message_parts.append(
- f"{len(existing_files)} existing files staged")
- if deleted_files:
- message_parts.append(f"{len(deleted_files)} deleted files staged")
-
- message = " and ".join(
- message_parts) if message_parts else "No files staged"
- return True, message
-
- except git.exc.GitCommandError as e:
- logger.error(f"Git command error staging files: {str(e)}",
- exc_info=True)
- return False, f"Error staging files: {str(e)}"
- except Exception as e:
- logger.error(f"Error staging files: {str(e)}", exc_info=True)
- return False, f"Error staging files: {str(e)}"
diff --git a/backend/app/git/operations/types.py b/backend/app/git/operations/types.py
deleted file mode 100644
index f7bf94c..0000000
--- a/backend/app/git/operations/types.py
+++ /dev/null
@@ -1,52 +0,0 @@
-from dataclasses import dataclass
-from typing import List, Dict, Optional, Literal
-from enum import Enum
-
-
-class FileType(str, Enum):
- REGEX = "regex"
- CUSTOM_FORMAT = "custom format"
- QUALITY_PROFILE = "quality profile"
-
-
-class ResolutionChoice(str, Enum):
- LOCAL = "local"
- INCOMING = "incoming"
-
-
-@dataclass
-class TagConflict:
- tag: str
- local_status: Literal["Present", "Absent"]
- incoming_status: Literal["Present", "Absent"]
- resolution: Optional[ResolutionChoice] = None
-
-
-@dataclass
-class FormatConflict:
- format_id: str
- local_score: Optional[int]
- incoming_score: Optional[int]
- resolution: Optional[ResolutionChoice] = None
-
-
-@dataclass
-class GeneralConflict:
- key: str
- local_value: any
- incoming_value: any
- resolution: Optional[ResolutionChoice] = None
-
-
-@dataclass
-class FileResolution:
- file_type: FileType
- filename: str
- tags: List[TagConflict]
- formats: List[FormatConflict]
- general: List[GeneralConflict]
-
-
-@dataclass
-class ResolutionRequest:
- resolutions: Dict[str, FileResolution]
diff --git a/backend/app/git/operations/unstage.py b/backend/app/git/operations/unstage.py
deleted file mode 100644
index 7890ea7..0000000
--- a/backend/app/git/operations/unstage.py
+++ /dev/null
@@ -1,15 +0,0 @@
-# git/operations/unstage.py
-import git
-import logging
-
-logger = logging.getLogger(__name__)
-
-
-def unstage_files(repo_path, files):
- try:
- repo = git.Repo(repo_path)
- repo.index.reset(files=files)
- return True, "Successfully unstaged files."
- except Exception as e:
- logger.error(f"Error unstaging files: {str(e)}", exc_info=True)
- return False, f"Error unstaging files: {str(e)}"
diff --git a/backend/app/git/status/commit_history.py b/backend/app/git/status/commit_history.py
deleted file mode 100644
index 9d37a54..0000000
--- a/backend/app/git/status/commit_history.py
+++ /dev/null
@@ -1,159 +0,0 @@
-# status/commit_history.py
-
-import git
-from datetime import datetime
-import logging
-
-logger = logging.getLogger(__name__)
-
-
-def format_commit(commit, repo, tracking_branch=None):
- """Helper function to format a single commit's information"""
- # Check if it's a merge commit
- is_merge = len(commit.parents) > 1
-
- # Get the remote URL for the commit if possible
- remote_url = None
- if tracking_branch:
- remote_url = repo.remote().url
- if remote_url.endswith('.git'):
- remote_url = remote_url[:-4]
- remote_url += f"/commit/{commit.hexsha}"
-
- commit_info = {
- 'hash': commit.hexsha,
- 'message': commit.message.strip(),
- 'author': f"{commit.author.name} <{commit.author.email}>",
- 'date': commit.committed_datetime.isoformat(),
- 'isMerge': is_merge,
- 'remoteUrl': remote_url,
- 'details': {
- 'files_changed': [],
- 'insertions': 0,
- 'deletions': 0
- }
- }
-
- # Get detailed stats
- try:
- if len(commit.parents) > 0:
- # Get the diff between this commit and its first parent
- diff = commit.parents[0].diff(commit)
-
- # Initialize stats
- stats = {'files_changed': [], 'insertions': 0, 'deletions': 0}
-
- # Get the total diff stats using git diff --numstat
- raw_stats = repo.git.diff(commit.parents[0].hexsha,
- commit.hexsha,
- numstat=True).splitlines()
-
- for line in raw_stats:
- if not line.strip():
- continue
- adds, dels, file_path = line.split('\t')
- # Handle binary files which show up as '-' in numstat
- if adds != '-' and dels != '-':
- stats['insertions'] += int(adds)
- stats['deletions'] += int(dels)
- stats['files_changed'].append(file_path)
-
- commit_info['details'] = stats
-
- except Exception as e:
- logger.debug(f"Error getting commit details: {e}")
-
- return commit_info
-
-
-def get_git_commit_history(repo_path, branch=None):
- """
- Get both local and remote commit history for the repository.
-
- Args:
- repo_path (str): Path to the git repository
- branch (str, optional): Branch name to get history for. Defaults to current branch.
-
- Returns:
- tuple: (success: bool, result: dict/str)
- On success, returns (True, {
- 'local_commits': [...],
- 'remote_commits': [...],
- 'ahead_count': int,
- 'behind_count': int,
- 'branch': str,
- 'has_remote': bool
- })
- On failure, returns (False, error_message)
- """
- try:
- repo = git.Repo(repo_path)
- current_branch = repo.active_branch
- branch_to_check = branch if branch else current_branch.name
-
- # Get the tracking branch
- tracking_branch = None
- try:
- tracking_branch = repo.active_branch.tracking_branch()
- except Exception as e:
- logger.debug(f"No tracking branch found: {e}")
-
- local_commits = []
- remote_commits = []
- ahead_count = 0
- behind_count = 0
-
- if tracking_branch:
- try:
- # Find the merge base (common ancestor)
- merge_base = repo.merge_base(tracking_branch,
- current_branch)[0]
-
- # Get commits that are in local but not in remote (ahead)
- local_commits = [
- format_commit(commit, repo, tracking_branch)
- for commit in repo.iter_commits(
- f"{tracking_branch.name}..{current_branch.name}")
- ]
- ahead_count = len(local_commits)
-
- # Get commits that are in remote but not in local (behind)
- remote_commits = [
- format_commit(commit, repo, tracking_branch)
- for commit in repo.iter_commits(
- f"{current_branch.name}..{tracking_branch.name}")
- ]
- behind_count = len(remote_commits)
-
- # If no divergence, get recent commits from current branch
- if not local_commits and not remote_commits:
- local_commits = [
- format_commit(commit, repo, tracking_branch)
- for commit in repo.iter_commits(current_branch.name,
- max_count=50)
- ]
-
- except git.GitCommandError as e:
- logger.error(f"Git command error while getting commits: {e}")
- return False, f"Error getting commits: {str(e)}"
-
- else:
- # If no tracking branch, just get recent local commits
- local_commits = [
- format_commit(commit, repo)
- for commit in repo.iter_commits(current_branch.name,
- max_count=50)
- ]
-
- return True, {
- 'local_commits': local_commits,
- 'remote_commits': remote_commits,
- 'ahead_count': ahead_count,
- 'behind_count': behind_count,
- 'branch': branch_to_check,
- 'has_remote': tracking_branch is not None
- }
-
- except Exception as e:
- logger.exception("Error getting commit history")
- return False, f"Unexpected error getting commit history: {str(e)}"
diff --git a/backend/app/git/status/comparison.py b/backend/app/git/status/comparison.py
deleted file mode 100644
index 9272db4..0000000
--- a/backend/app/git/status/comparison.py
+++ /dev/null
@@ -1,232 +0,0 @@
-import logging
-import os
-from typing import Any, Dict, List, Optional
-
-logger = logging.getLogger(__name__)
-
-
-def compare_yaml(old_data: Any,
- new_data: Any,
- path: str = "") -> List[Dict[str, Any]]:
- """
- Recursively compare two YAML structures and generate a list of changes.
- Handles nested structures including:
- - Simple values (strings, numbers, booleans)
- - Lists of primitives (like tags: ['1080p', 'x264'])
- - Lists of objects (like custom_formats: [{name: 'DON', score: 80}])
- - Nested objects (like qualities: {id: 1, name: 'HD', qualities: [...]})
-
- Args:
- old_data: Original data structure
- new_data: New data structure to compare against
- path: Current path in the data structure (for tracking nested changes)
-
- Returns:
- List of changes, where each change is a dict containing:
- {
- key: Path to the changed field (e.g. "custom_formats[DON].score")
- change: 'added' | 'removed' | 'modified'
- from: Original value (for modified/removed)
- to: New value (for modified/added)
- value: List of values (for array additions/removals)
- }
- """
- logger.debug(f"Comparing path: {path or 'root'}")
- changes = []
-
- if old_data is None and new_data is None:
- return changes
-
- if old_data is None and new_data is not None:
- if isinstance(new_data, dict):
- old_data = {}
- elif isinstance(new_data, list):
- old_data = []
- else:
- old_data = None
-
- if old_data is not None and new_data is None:
- logger.debug(f"Path {path} removed")
- return [{"key": path, "change": "removed", "from": old_data}]
-
- if type(old_data) != type(new_data):
- logger.debug(
- f"Type mismatch at {path}: {type(old_data)} → {type(new_data)}")
- return [{
- "key": path,
- "change": "modified",
- "from": old_data,
- "to": new_data
- }]
-
- if isinstance(old_data, list):
- has_objects = any(
- isinstance(x, dict) for x in old_data + new_data if x is not None)
- if has_objects:
- try:
- old_dict = {x.get("name"): x for x in old_data if x}
- new_dict = {x.get("name"): x for x in new_data if x}
- added = set(new_dict) - set(old_dict)
- removed = set(old_dict) - set(new_dict)
- common = set(old_dict) & set(new_dict)
-
- if added:
- logger.debug(f"Added items at {path}: {added}")
- if removed:
- logger.debug(f"Removed items at {path}: {removed}")
-
- for key in added:
- changes.append({
- "key": f"{path}[{key}]",
- "change": "added",
- "to": new_dict[key]
- })
- for key in removed:
- changes.append({
- "key": f"{path}[{key}]",
- "change": "removed",
- "from": old_dict[key]
- })
- for key in common:
- if old_dict[key] != new_dict[key]:
- logger.debug(
- f"Found changes in common item {key} at {path}")
- changes.extend(
- compare_yaml(old_dict[key], new_dict[key],
- f"{path}[{key}]"))
- except Exception as e:
- logger.warning(
- f"Failed to compare by name at {path}, falling back to index comparison: {str(e)}"
- )
- for i, (old_item,
- new_item) in enumerate(zip(old_data, new_data)):
- if old_item != new_item:
- changes.extend(
- compare_yaml(old_item, new_item, f"{path}[{i}]"))
- else:
- old_set = set(old_data)
- new_set = set(new_data)
- if added := new_set - old_set:
- logger.debug(f"Added values at {path}: {added}")
- changes.append({
- "key": path,
- "change": "added",
- "value": sorted([x for x in added if x is not None])
- })
- if removed := old_set - new_set:
- logger.debug(f"Removed values at {path}: {removed}")
- changes.append({
- "key": path,
- "change": "removed",
- "value": sorted([x for x in removed if x is not None])
- })
-
- elif isinstance(old_data, dict):
- all_keys = set(old_data) | set(new_data)
- for key in all_keys:
- new_path = f"{path}.{key}" if path else key
- if key not in old_data:
- logger.debug(f"Added key at {new_path}")
- changes.append({
- "key": new_path,
- "change": "added",
- "to": new_data[key]
- })
- elif key not in new_data:
- logger.debug(f"Removed key at {new_path}")
- changes.append({
- "key": new_path,
- "change": "removed",
- "from": old_data[key]
- })
- else:
- changes.extend(
- compare_yaml(old_data[key], new_data[key], new_path))
- else:
- if old_data != new_data:
- logger.debug(f"Modified value at {path}: {old_data} → {new_data}")
- changes.append({
- "key": path,
- "change": "modified",
- "from": old_data,
- "to": new_data
- })
-
- for c in changes:
- if c["change"] == "added" and "from" not in c:
- c["from"] = "~"
- return changes
-
-
-def normalize_yaml_keys(data):
- """Convert boolean keys to strings in YAML data to avoid JSON serialization issues"""
- if isinstance(data, dict):
- return {str(k): normalize_yaml_keys(v) for k, v in data.items()}
- elif isinstance(data, list):
- return [normalize_yaml_keys(item) for item in data]
- else:
- return data
-
-
-def create_change_summary(old_data: Optional[Dict], new_data: Optional[Dict],
- file_path: str) -> Dict[str, Any]:
- """
- Create a summary of changes between two YAML structures with file metadata.
- This wrapper adds git-specific fields like name, status, and file path.
- Args:
- old_data: Original YAML data (from git HEAD)
- new_data: New YAML data (from working directory)
- file_path: Path to the file being compared
-
- Returns:
- Dict containing:
- - name: Current name (from new_data or filename)
- - prior_name: Previous name (from old_data)
- - outgoing_name: New name if changed, else None
- - status: 'New' | 'Modified' | 'Deleted'
- - file_path: Path to the file
- - modified: True if file was modified/added
- - deleted: True if file was deleted
- - changes: Detailed changes from compare_yaml
- """
- try:
- # Normalize keys to avoid JSON serialization issues with boolean keys
- old_data = normalize_yaml_keys(old_data) if old_data else None
- new_data = normalize_yaml_keys(new_data) if new_data else None
- filename = os.path.basename(file_path)
- new_name = new_data.get("name") if new_data else None
- old_name = old_data.get("name") if old_data else None
- current_name = new_name or filename
-
- if old_data is None and new_data is not None:
- status = "New"
- logger.info(f"New file detected: {file_path}")
- elif old_data is not None and new_data is None:
- status = "Deleted"
- logger.info(f"Deleted file detected: {file_path}")
- else:
- status = "Modified"
- logger.info(f"Modified file detected: {file_path}")
-
- detailed_changes = compare_yaml(old_data, new_data)
-
- if detailed_changes:
- logger.info(
- f"Found {len(detailed_changes)} changes in {file_path}")
- logger.debug(f"Detailed changes: {detailed_changes}")
-
- return {
- "name": current_name,
- "prior_name": old_name,
- "outgoing_name": new_name if new_name != old_name else None,
- "status": status,
- "file_path": file_path,
- "modified": status != "Deleted",
- "deleted": status == "Deleted",
- "changes": detailed_changes
- }
- except Exception as e:
- logger.error(
- f"Error creating change summary for {file_path}: {str(e)}",
- exc_info=True)
- raise
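`compare_yaml` flattens a nested diff into change records keyed by a dotted/bracketed path, so a score change inside a named list item becomes a single entry. A short sketch of the records the removed function would produce, assuming the definitions above; the profile data is illustrative:

```python
# Illustrative input; the quality profile data is made up.
old = {"name": "1080p Transparent",
       "tags": ["1080p"],
       "custom_formats": [{"name": "DON", "score": 80}]}
new = {"name": "1080p Transparent",
       "tags": ["1080p", "x264"],
       "custom_formats": [{"name": "DON", "score": 100}]}

for change in compare_yaml(old, new):
    print(change)
# Expected records (order may differ):
#   {'key': 'tags', 'change': 'added', 'value': ['x264'], 'from': '~'}
#   {'key': 'custom_formats[DON].score', 'change': 'modified', 'from': 80, 'to': 100}
```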
diff --git a/backend/app/git/status/conflict_comparison.py b/backend/app/git/status/conflict_comparison.py
deleted file mode 100644
index 5260fe2..0000000
--- a/backend/app/git/status/conflict_comparison.py
+++ /dev/null
@@ -1,283 +0,0 @@
-import os
-import yaml
-import logging
-from typing import Any, Dict, List, Optional, Union
-
-logger = logging.getLogger(__name__)
-
-# Define conflict states
-UNRESOLVED = "UNRESOLVED"
-RESOLVED = "RESOLVED"
-MODIFY_DELETE = "MODIFY_DELETE"
-
-
-def compare_conflict_yaml(ours_data: Any,
- theirs_data: Any,
- path: str = "") -> List[Dict[str, Any]]:
- """
- Compare two YAML structures and generate conflict information.
- Handles nested structures and produces conflict records in the format:
- {
- 'parameter': 'Field Name',
- 'local_value': value_from_ours,
- 'incoming_value': value_from_theirs
- }
- """
- conflicts = []
-
- # Handle None/deletion cases
- if ours_data is None and theirs_data is None:
- return conflicts
- if ours_data is None:
- # Local version deleted
- param_name = path or 'File'
- return [{
- 'parameter': param_name,
- 'local_value': '🗑️ File deleted in local version',
- 'incoming_value': '📄 File exists in incoming version'
- }]
- if theirs_data is None:
- # Incoming version deleted
- param_name = path or 'File'
- return [{
- 'parameter': param_name,
- 'local_value': '📄 File exists in local version',
- 'incoming_value': '🗑️ File deleted in incoming version'
- }]
-
- # Handle different types as conflicts
- if type(ours_data) != type(theirs_data):
- return [{
- 'parameter': path,
- 'local_value': ours_data,
- 'incoming_value': theirs_data
- }]
-
- # Handle lists
- if isinstance(ours_data, list):
- # Check if list contains objects
- has_objects = any(
- isinstance(x, dict) for x in ours_data + theirs_data
- if x is not None)
-
- if has_objects:
- return compare_object_arrays(ours_data, theirs_data, path)
- else:
- return compare_primitive_arrays(ours_data, theirs_data, path)
-
- # Handle dictionaries
- elif isinstance(ours_data, dict):
- return compare_dicts(ours_data, theirs_data, path)
-
- # Handle primitive values
- elif ours_data != theirs_data:
- return [{
- 'parameter': path,
- 'local_value': ours_data,
- 'incoming_value': theirs_data
- }]
-
- return conflicts
-
-
-def compare_object_arrays(ours_data: List[Dict], theirs_data: List[Dict],
- path: str) -> List[Dict]:
- """Compare arrays of objects using name field as identifier"""
- conflicts = []
-
- try:
- # Build lookup dictionaries
- ours_dict = {x.get('name'): x for x in ours_data if x}
- theirs_dict = {x.get('name'): x for x in theirs_data if x}
-
- # Find additions/removals
- ours_keys = set(ours_dict.keys())
- theirs_keys = set(theirs_dict.keys())
-
- # Handle added items
- for key in (theirs_keys - ours_keys):
- conflicts.append({
- 'parameter': f"{path}[{key}]" if path else key,
- 'local_value': None,
- 'incoming_value': theirs_dict[key]
- })
-
- # Handle removed items
- for key in (ours_keys - theirs_keys):
- conflicts.append({
- 'parameter': f"{path}[{key}]" if path else key,
- 'local_value': ours_dict[key],
- 'incoming_value': None
- })
-
- # Compare common items
- for key in (ours_keys & theirs_keys):
- if ours_dict[key] != theirs_dict[key]:
- new_path = f"{path}[{key}]" if path else key
- conflicts.extend(
- compare_conflict_yaml(ours_dict[key], theirs_dict[key],
- new_path))
-
- except Exception as e:
- logger.warning(
- f"Failed to compare objects by name at {path}, using positional comparison: {str(e)}"
- )
- # Fallback to positional comparison
- for i, (ours_item,
- theirs_item) in enumerate(zip(ours_data, theirs_data)):
- if ours_item != theirs_item:
- new_path = f"{path}[{i}]" if path else str(i)
- conflicts.extend(
- compare_conflict_yaml(ours_item, theirs_item, new_path))
-
- return conflicts
-
-
-def compare_primitive_arrays(ours_data: List, theirs_data: List,
- path: str) -> List[Dict]:
- """Compare arrays of primitive values"""
- conflicts = []
-
- ours_set = set(ours_data)
- theirs_set = set(theirs_data)
-
- # Handle additions
- added = theirs_set - ours_set
- if added:
- conflicts.append({
- 'parameter': path or 'Array',
- 'local_value': sorted(list(ours_set)),
- 'incoming_value': sorted(list(theirs_set))
- })
-
- return conflicts
-
-
-def format_array_for_display(data):
- """Format array data for display in conflict resolution"""
- if isinstance(data, list):
- if not data:
- return "[] (empty array)"
- elif all(isinstance(x, dict) and 'name' in x for x in data):
- # Array of objects with names - show the names
- names = [x['name'] for x in data]
- if len(names) <= 5:
- return f"[{', '.join(names)}]"
- else:
- return f"[{', '.join(names[:5])}, ... and {len(names) - 5} more]"
- elif all(not isinstance(x, (dict, list)) for x in data):
- # Array of primitives
- if len(data) <= 5:
- return f"[{', '.join(str(x) for x in data)}]"
- else:
- return f"[{', '.join(str(x) for x in data[:5])}, ... and {len(data) - 5} more]"
- else:
- # Mixed or complex array
- return f"Array with {len(data)} items"
- return data
-
-
-def compare_dicts(ours_data: Dict, theirs_data: Dict, path: str) -> List[Dict]:
- """Compare dictionaries recursively"""
- conflicts = []
-
- # Get all keys from both dictionaries
- all_keys = set(ours_data.keys()) | set(theirs_data.keys())
-
- for key in all_keys:
- new_path = f"{path}.{key}" if path else key
-
- if key not in ours_data:
- # Format arrays for better display when field is missing locally
- incoming_val = theirs_data[key]
- if isinstance(incoming_val, list):
- incoming_val = format_array_for_display(incoming_val)
- conflicts.append({
- 'parameter': new_path,
- 'local_value': None,
- 'incoming_value': incoming_val
- })
- elif key not in theirs_data:
- # Format arrays for better display when field is missing remotely
- local_val = ours_data[key]
- if isinstance(local_val, list):
- local_val = format_array_for_display(local_val)
- conflicts.append({
- 'parameter': new_path,
- 'local_value': local_val,
- 'incoming_value': None
- })
- elif ours_data[key] != theirs_data[key]:
- conflicts.extend(
- compare_conflict_yaml(ours_data[key], theirs_data[key],
- new_path))
-
- return conflicts
-
-
-def create_conflict_summary(file_path: str,
- ours_data: Optional[Dict],
- theirs_data: Optional[Dict],
- status: str = UNRESOLVED) -> Dict[str, Any]:
- """
- Create a summary of conflicts between two versions of a file.
-
- Args:
- file_path: Path to the file in conflict
- ours_data: Our version of the YAML data
- theirs_data: Their version of the YAML data
- status: Conflict status (UNRESOLVED, RESOLVED, or MODIFY_DELETE)
-
- Returns:
- Dict containing:
- - file_path: Path to the conflicted file
- - type: Type of item
- - name: Name from our version or filename
- - incoming_name: Name from their version (if available)
- - status: Current conflict status
- - conflict_details: List of specific conflicts
- """
- try:
- from .utils import determine_type # Import here to avoid circular imports
-
- # Generate conflict details
- conflict_details = {
- 'conflicting_parameters':
- compare_conflict_yaml(ours_data, theirs_data)
- }
-
- # Get local name
- local_name = None
- if ours_data and isinstance(ours_data, dict) and 'name' in ours_data:
- local_name = ours_data.get('name')
-
- if not local_name:
- # Strip the extension to get a cleaner name
- basename = os.path.basename(file_path)
- local_name = os.path.splitext(basename)[0]
-
- # Get incoming name
- incoming_name = None
- if theirs_data and isinstance(theirs_data, dict) and 'name' in theirs_data:
- incoming_name = theirs_data.get('name')
-
- if not incoming_name:
- # Strip the extension to get a cleaner name
- basename = os.path.basename(file_path)
- incoming_name = os.path.splitext(basename)[0]
-
- result = {
- 'file_path': file_path,
- 'type': determine_type(file_path),
- 'name': local_name,
- 'incoming_name': incoming_name,
- 'status': status,
- 'conflict_details': conflict_details
- }
-
- return result
-
- except Exception as e:
- logger.error(
- f"Failed to create conflict summary for {file_path}: {str(e)}")
- return None
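`compare_conflict_yaml` reduces the differences between the local ("ours") and incoming ("theirs") versions to parameter/local/incoming triples that the conflict UI can present for resolution. A short sketch of its output, assuming the definitions above; the custom format data is illustrative:

```python
# Illustrative input; the custom format data is made up.
ours = {"name": "DON", "score": 80, "tags": ["1080p"]}
theirs = {"name": "DON", "score": 100, "tags": ["1080p", "x264"]}

for conflict in compare_conflict_yaml(ours, theirs):
    print(conflict)
# Expected records (order may differ):
#   {'parameter': 'score', 'local_value': 80, 'incoming_value': 100}
#   {'parameter': 'tags', 'local_value': ['1080p'], 'incoming_value': ['1080p', 'x264']}
```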
diff --git a/backend/app/git/status/incoming_changes.py b/backend/app/git/status/incoming_changes.py
deleted file mode 100644
index 101dad0..0000000
--- a/backend/app/git/status/incoming_changes.py
+++ /dev/null
@@ -1,229 +0,0 @@
-import os
-import yaml
-import logging
-from git import GitCommandError
-from .comparison import create_change_summary
-from .utils import determine_type, parse_commit_message, extract_name_from_path
-
-logger = logging.getLogger(__name__)
-
-
-# Use the centralized extract_name_from_path function from utils
-extract_name = extract_name_from_path
-
-
-def check_merge_conflict(repo, branch, file_path):
- """Check if pulling a file would cause merge conflicts"""
- try:
- # Check for local changes (uncommitted or unpushed)
- status = repo.git.status('--porcelain', file_path).strip()
- if status:
- status_code = status[:2] if len(status) >= 2 else ''
- has_changes = 'M' in status_code or 'A' in status_code or 'D' in status_code or 'R' in status_code
- else:
- # Check for unpushed commits
- merge_base = repo.git.merge_base('HEAD',
- f'origin/{branch}').strip()
- committed_changes = repo.git.log(f'{merge_base}..HEAD',
- '--',
- file_path,
- ignore_missing=True).strip()
- has_changes = bool(committed_changes)
-
- if has_changes:
- # Test if merge would cause conflicts
- try:
- merge_test = repo.git.merge_tree('--write-tree', 'HEAD',
- f'origin/{branch}')
- return any(
- line.startswith('<<<<<<< ')
- for line in merge_test.splitlines() if file_path in line)
- except GitCommandError:
- return True # Assume conflict if merge test fails
-
- return False
- except Exception as e:
- logger.error(f"Failed to check conflicts for {file_path}: {str(e)}")
- return False
-
-
-def get_commit_message(repo, branch, file_path):
- """Get commit message for incoming changes to a file"""
- try:
- raw_message = repo.git.show(f'HEAD...origin/{branch}', '--format=%B',
- '-s', '--', file_path).strip()
- return parse_commit_message(raw_message)
- except GitCommandError as e:
- logger.error(
- f"Git command error getting commit message for {file_path}: {str(e)}"
- )
- return {
- "body": "",
- "footer": "",
- "scope": "",
- "subject": f"Error retrieving commit message: {str(e)}",
- "type": ""
- }
-
-
-def parse_commit_message(message):
- """Parse a commit message into its components"""
- try:
- # Default structure
- parsed = {
- "type": "Unknown Type",
- "scope": "Unknown Scope",
- "subject": "",
- "body": "",
- "footer": ""
- }
-
- if not message:
- return parsed
-
- # Split message into lines
- lines = message.strip().split('\n')
-
- # Parse first line (header)
- if lines:
- header = lines[0]
-
- # Try to parse conventional commit format: type(scope): subject
- import re
- conventional_format = re.match(r'^(\w+)(?:\(([^)]+)\))?: (.+)$',
- header)
-
- if conventional_format:
- groups = conventional_format.groups()
- parsed.update({
- "type": groups[0] or "Unknown Type",
- "scope": groups[1] or "Unknown Scope",
- "subject": groups[2]
- })
- else:
- parsed["subject"] = header
-
- # Parse body and footer
- if len(lines) > 1:
- # Find the divider between body and footer (if any)
- footer_start = -1
- for i, line in enumerate(lines[1:], 1):
- if re.match(r'^[A-Z_-]+:', line):
- footer_start = i
- break
-
- # Extract body and footer
- if footer_start != -1:
- parsed["body"] = '\n'.join(lines[1:footer_start]).strip()
- parsed["footer"] = '\n'.join(lines[footer_start:]).strip()
- else:
- parsed["body"] = '\n'.join(lines[1:]).strip()
-
- return parsed
-
- except Exception as e:
- logger.error(f"Error parsing commit message: {str(e)}")
- return {
- "type": "Unknown Type",
- "scope": "Unknown Scope",
- "subject": "Error parsing commit message",
- "body": "",
- "footer": ""
- }
-
-
-def get_incoming_changes(repo, branch):
- """Get list of changes that would come in from origin"""
- try:
- # Get status including renames
- diff_output = repo.git.diff(f'HEAD...origin/{branch}', '--name-status',
- '-M').split('\n')
- changed_files = []
- rename_mapping = {}
-
- # Process status to identify renames
- for line in diff_output:
- if not line:
- continue
- parts = line.split('\t')
- if len(parts) < 2:
- continue
-
- status = parts[0]
- if status.startswith('R'):
- old_path, new_path = parts[1], parts[2]
- rename_mapping[new_path] = old_path
- changed_files.append(new_path)
- else:
- changed_files.append(parts[1])
-
- logger.info(f"Processing {len(changed_files)} incoming changes")
-
- incoming_changes = []
- for file_path in changed_files:
- try:
- # Handle renamed files
- old_path = rename_mapping.get(file_path, file_path)
- is_rename = file_path in rename_mapping
-
- # Get local and remote versions
- try:
- local_content = repo.git.show(f'HEAD:{old_path}')
- local_data = yaml.safe_load(local_content)
- except (GitCommandError, yaml.YAMLError):
- local_data = None
-
- try:
- remote_content = repo.git.show(
- f'origin/{branch}:{file_path}')
- remote_data = yaml.safe_load(remote_content)
- except (GitCommandError, yaml.YAMLError):
- remote_data = None
-
- # Skip if no actual changes
- if local_data == remote_data and not is_rename:
- continue
-
- # Check for conflicts and get commit info
- will_conflict = check_merge_conflict(repo, branch, file_path)
- commit_message = get_commit_message(repo, branch, file_path)
-
- # Generate change summary
- change = create_change_summary(local_data, remote_data,
- file_path)
-
- # Add incoming-specific fields
- change.update({
- 'commit_message':
- commit_message,
- 'type':
- determine_type(file_path),
- 'will_conflict':
- will_conflict,
- 'id':
- remote_data.get('id') if remote_data else None,
- 'local_name':
- extract_name(old_path)
- if is_rename else extract_name(file_path),
- 'incoming_name':
- extract_name(file_path),
- 'staged':
- False
- })
-
- if is_rename:
- change['status'] = 'Renamed'
-
- incoming_changes.append(change)
-
- except Exception as e:
- logger.error(
- f"Failed to process incoming change for {file_path}: {str(e)}"
- )
- continue
-
- return incoming_changes
-
- except Exception as e:
- logger.error(f"Failed to get incoming changes: {str(e)}")
- return []
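Commit metadata for incoming files is parsed from conventional-commit headers of the form `type(scope): subject`, with any trailing `KEY:` lines treated as the footer. A quick sketch of the result, assuming the `parse_commit_message` defined in the removed module above; the commit message is made up:

```python
# Illustrative commit message.
message = "feat(profile): raise DON score\n\n- bump score to 100\n\nBREAKING-CHANGE: none"
parsed = parse_commit_message(message)
# parsed == {
#     "type": "feat",
#     "scope": "profile",
#     "subject": "raise DON score",
#     "body": "- bump score to 100",
#     "footer": "BREAKING-CHANGE: none",
# }
```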
diff --git a/backend/app/git/status/merge_conflicts.py b/backend/app/git/status/merge_conflicts.py
deleted file mode 100644
index 31825b9..0000000
--- a/backend/app/git/status/merge_conflicts.py
+++ /dev/null
@@ -1,141 +0,0 @@
-import os
-import yaml
-import logging
-from git import GitCommandError
-from .conflict_comparison import create_conflict_summary, UNRESOLVED, RESOLVED, MODIFY_DELETE
-
-logger = logging.getLogger(__name__)
-
-
-def get_version_data(repo, ref, file_path):
- """Get YAML data from a specific version of a file"""
- try:
- content = repo.git.show(f'{ref}:{file_path}')
- return yaml.safe_load(content) if content else None
- except GitCommandError:
- return None
-
-
-def process_modify_delete_conflict(repo, file_path, deleted_in_head):
- """Handle case where one side modified while other deleted"""
- try:
- # Check if conflict is resolved
- status_output = repo.git.status('--porcelain', file_path)
- file_exists = os.path.exists(os.path.join(repo.working_dir, file_path))
- is_staged = status_output and status_output[0] in ['M', 'A']
-
- # Determine status
- if (file_exists and is_staged) or (not file_exists
- and status_output.startswith('D ')):
- status = RESOLVED
- else:
- status = MODIFY_DELETE
-
- # For delete conflicts, we need to extract the name for display purposes
- # This will be the name of the actual file before it was deleted
- basename = os.path.basename(file_path)
- filename = os.path.splitext(basename)[0] # Strip extension
-
- # Get metadata from existing version to extract name if possible
- if file_exists:
- # File exists locally, read it
- try:
- with open(os.path.join(repo.working_dir, file_path), 'r') as f:
- existing_data = yaml.safe_load(f.read())
- except Exception as read_error:
- logger.warning(f"Could not read existing file {file_path}: {str(read_error)}")
- existing_data = {'name': filename}
- else:
- # File was deleted locally, try to get from merge head
- try:
- existing_data = get_version_data(repo, 'MERGE_HEAD', file_path)
- except Exception as merge_error:
- logger.warning(f"Could not get merge head for {file_path}: {str(merge_error)}")
- existing_data = {'name': filename}
-
- # Simplified placeholder data for deleted version
- if deleted_in_head:
- # File was deleted in HEAD (local) but exists in MERGE_HEAD (incoming)
- local_data = None # This indicates deleted
- try:
- # Try to get name from incoming
- incoming_data = existing_data if existing_data else {'name': filename}
- except Exception:
- incoming_data = {'name': filename}
- else:
- # File exists in HEAD (local) but deleted in MERGE_HEAD (incoming)
- try:
- local_data = existing_data if existing_data else {'name': filename}
- except Exception:
- local_data = {'name': filename}
- incoming_data = None # This indicates deleted
-
- return create_conflict_summary(file_path, local_data, incoming_data, status)
-
- except Exception as e:
- logger.error(
- f"Failed to process modify/delete conflict for {file_path}: {str(e)}"
- )
- return None
-
-
-def process_regular_conflict(repo, file_path):
- """Handle standard merge conflict between two versions"""
- try:
- # Get both versions
- ours_data = get_version_data(repo, 'HEAD', file_path)
- theirs_data = get_version_data(repo, 'MERGE_HEAD', file_path)
-
- if not ours_data and not theirs_data:
- return None
-
- # Check if conflict is resolved
- status_output = repo.git.status('--porcelain', file_path)
- status = UNRESOLVED if status_output.startswith('UU') else RESOLVED
-
- return create_conflict_summary(file_path, ours_data, theirs_data,
- status)
-
- except Exception as e:
- logger.error(f"Failed to process conflict for {file_path}: {str(e)}")
- return None
-
-
-def get_merge_conflicts(repo):
- """Get all merge conflicts in the repository"""
- try:
- # Check if we're in a merge state
- if not os.path.exists(os.path.join(repo.git_dir, 'MERGE_HEAD')):
- return []
-
- conflicts = []
- status = repo.git.status('--porcelain', '-z').split('\0')
-
- # Process each status entry
- for item in status:
- if not item or len(item) < 4:
- continue
-
- x, y = item[0], item[1]
- file_path = item[3:]
-
- # Handle modify/delete conflicts
- if (x == 'D' and y == 'U') or (x == 'U'
- and y == 'D') or (x == 'A'
- and y == 'U'):
- conflict = process_modify_delete_conflict(
- repo, file_path, x == 'D')
- if conflict:
- conflicts.append(conflict)
-
- # Handle regular conflicts
- elif 'U' in (x, y) or (x == 'D' and y == 'D'):
- conflict = process_regular_conflict(repo, file_path)
- if conflict:
- conflicts.append(conflict)
-
- return conflicts
-
- except Exception as e:
- logger.error(f"Failed to get merge conflicts: {str(e)}")
- return []
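Conflict detection above keys off the two-letter XY codes from `git status --porcelain -z`: modify/delete pairs (`DU`, `UD`, `AU`) are routed to the modify/delete handler, while `UU` and `DD` entries are treated as regular merge conflicts. A self-contained sketch of the same classification; the helper name and the example entries are hypothetical:

```python
def classify(entry: str) -> str:
    """Classify one porcelain status entry the way the removed get_merge_conflicts did."""
    x, y, path = entry[0], entry[1], entry[3:]
    if (x, y) in {("D", "U"), ("U", "D"), ("A", "U")}:
        return f"{path}: modify/delete conflict"
    if "U" in (x, y) or (x, y) == ("D", "D"):
        return f"{path}: regular merge conflict"
    return f"{path}: not conflicted"

print(classify("UU custom_formats/DON.yml"))   # regular merge conflict
print(classify("DU profiles/2160p.yml"))       # modify/delete conflict
```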
diff --git a/backend/app/git/status/outgoing_changes.py b/backend/app/git/status/outgoing_changes.py
deleted file mode 100644
index c80c507..0000000
--- a/backend/app/git/status/outgoing_changes.py
+++ /dev/null
@@ -1,110 +0,0 @@
-import os
-import yaml
-import logging
-from git import GitCommandError
-from .comparison import create_change_summary
-from .utils import determine_type, extract_name_from_path
-
-logger = logging.getLogger(__name__)
-
-
-# Use the centralized extract_name_from_path function from utils
-extract_name = extract_name_from_path
-
-
-def get_outgoing_changes(repo):
- """Get list of changes in working directory"""
- try:
- status = repo.git.status('--porcelain', '-z').split('\0')
- logger.info(f"Processing {len(status)} changes from git status")
-
- changes = []
- i = 0
-
- while i < len(status):
- item = status[i]
- if not item:
- i += 1
- continue
-
- if len(item) < 4:
- logger.warning(f"Invalid status item format: {item}")
- i += 1
- continue
-
- x, y = item[0], item[1]
- file_path = item[3:]
-
- # Skip files in conflict state
- if x == 'U' or y == 'U':
- i += 1
- continue
-
- # Handle renamed files
- if x == 'R' or y == 'R':
- if i + 1 < len(status) and status[i + 1]:
- outgoing_name = extract_name(file_path)
- prior_name = extract_name(status[i + 1])
- original_path = status[i + 1] # Path for old content
- new_path = file_path # Path for new content
- is_staged = x == 'R'
- status_value = 'Renamed'
- i += 2
- else:
- i += 1
- else:
- name = extract_name(file_path)
- prior_name = name
- outgoing_name = name
- original_path = file_path
- new_path = file_path
- is_staged = x != ' ' and x != '?'
- status_value = None
- i += 1
-
- try:
- # Get old content (from HEAD)
- try:
- old_content = repo.git.show(f'HEAD:{original_path}')
- old_data = yaml.safe_load(old_content)
- except GitCommandError:
- old_data = None
- except yaml.YAMLError as e:
- logger.warning(
- f"Failed to parse old YAML for {original_path}: {str(e)}"
- )
- old_data = None
-
- # Get new content (from working directory)
- try:
- full_path = os.path.join(repo.working_dir, new_path)
- with open(full_path, 'r') as f:
- new_data = yaml.safe_load(f.read())
- except (IOError, yaml.YAMLError) as e:
- logger.warning(
- f"Failed to read/parse current file {new_path}: {str(e)}"
- )
- new_data = None
-
- # Generate change summary
- change = create_change_summary(old_data, new_data, new_path)
- change['type'] = determine_type(new_path)
- change['staged'] = is_staged
- change['prior_name'] = prior_name
- change['outgoing_name'] = outgoing_name
-
- if status_value:
- change['status'] = status_value
-
- changes.append(change)
-
- except Exception as e:
- logger.error(f"Failed to process {file_path}: {str(e)}",
- exc_info=True)
-
- return changes
-
- except Exception as e:
- logger.error(f"Failed to get outgoing changes: {str(e)}",
- exc_info=True)
- return []
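Working-tree changes are read from NUL-separated porcelain output, where a rename entry (`R` in either status column) is followed by the original path in the next NUL field, which the loop consumes as well. A minimal GitPython sketch of that parsing pattern; the repository path is an assumption:

```python
import git

repo = git.Repo("/config/db/repo")                 # path is an assumption
entries = repo.git.status("--porcelain", "-z").split("\0")

i = 0
while i < len(entries):
    entry = entries[i]
    if len(entry) < 4:                             # skip empty/short fields
        i += 1
        continue
    x, y, path = entry[0], entry[1], entry[3:]
    if "R" in (x, y) and i + 1 < len(entries) and entries[i + 1]:
        print(f"renamed: {entries[i + 1]} -> {path} (staged={x == 'R'})")
        i += 2                                     # consume the old-path field too
    else:
        print(f"changed: {path} (staged={x not in (' ', '?')})")
        i += 1
```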
diff --git a/backend/app/git/status/status.py b/backend/app/git/status/status.py
deleted file mode 100644
index 71e32be..0000000
--- a/backend/app/git/status/status.py
+++ /dev/null
@@ -1,302 +0,0 @@
-# git/status/status.py
-import git
-from git.exc import GitCommandError, InvalidGitRepositoryError
-import logging
-from .incoming_changes import get_incoming_changes
-from .outgoing_changes import get_outgoing_changes
-from .merge_conflicts import get_merge_conflicts
-from .utils import determine_type
-import os
-import yaml
-import threading
-from datetime import datetime
-import json
-from ...db import get_settings
-
-logger = logging.getLogger(__name__)
-
-
-class GitStatusManager:
- _instance = None
- _lock = threading.Lock()
-
- def __init__(self, repo_path):
- self.repo_path = repo_path
- self.repo = git.Repo(repo_path)
- self.status = {
- # Local status
- "branch": "",
- "outgoing_changes": [],
- "is_merging": False,
- "merge_conflicts": [],
- "has_conflicts": False,
-
- # Remote status
- "remote_branch_exists": False,
- "commits_behind": 0,
- "commits_ahead": 0,
- "incoming_changes": [],
- "has_unpushed_commits": False,
- "unpushed_files": [],
-
- # Metadata
- "last_local_update": None,
- "last_remote_update": None
- }
-
- @classmethod
- def get_instance(cls, repo_path=None):
- if not cls._instance and repo_path:
- with cls._lock:
- if not cls._instance:
- cls._instance = cls(repo_path)
- return cls._instance
-
- def update_local_status(self):
- """Update only local repository status"""
- try:
- self.repo = git.Repo(self.repo_path) # Refresh repo instance
-
- with self._lock:
- # Update branch
- self.status["branch"] = self.repo.active_branch.name
-
- # Check merge status
- self.status["is_merging"] = os.path.exists(
- os.path.join(self.repo.git_dir, 'MERGE_HEAD'))
-
- # Get local changes
- self.status["outgoing_changes"] = get_outgoing_changes(
- self.repo)
-
- # Get merge conflicts if merging
- self.status["merge_conflicts"] = (get_merge_conflicts(
- self.repo) if self.status["is_merging"] else [])
- self.status["has_conflicts"] = bool(
- self.status["merge_conflicts"])
-
- # Update timestamp
- self.status["last_local_update"] = datetime.now().isoformat()
-
- return True
- except Exception as e:
- logger.error(f"Error updating local status: {str(e)}")
- return False
-
- def update_remote_status(self):
- """Update remote repository status - called by scheduled task"""
- try:
- logger.info(
- f"Updating remote status for branch: {self.status['branch']}")
-
- # Do the fetch outside the lock
- self.repo.remotes.origin.fetch()
-
- # Get branch name safely
- with self._lock:
- branch = self.status["branch"]
-
- # Do git operations outside lock
- remote_refs = [ref.name for ref in self.repo.remotes.origin.refs]
- remote_branch_exists = f"origin/{branch}" in remote_refs
-
- if remote_branch_exists:
- commits_behind = list(
- self.repo.iter_commits(f'{branch}..origin/{branch}'))
- commits_ahead = list(
- self.repo.iter_commits(f'origin/{branch}..{branch}'))
-
- # Handle auto-pull before updating status
- if len(commits_behind) > 0:
- logger.info(
- f"Branch is {len(commits_behind)} commits behind")
- try:
- settings = get_settings()
- if int(settings.get('auto_pull_enabled', 0)):
- logger.info("Auto-pull enabled, pulling changes")
- from ..operations.manager import GitOperations
- git_ops = GitOperations(self.repo_path)
- pull_result = git_ops.pull(branch)
- logger.info(f"Auto-pull result: {pull_result}")
- success, message = pull_result
- if not success:
- logger.error(f"Auto-pull failed: {message}")
- # Refresh counts after pull
- commits_behind = list(
- self.repo.iter_commits(
- f'{branch}..origin/{branch}'))
- commits_ahead = list(
- self.repo.iter_commits(
- f'origin/{branch}..{branch}'))
- except Exception as e:
- logger.error(f"Error during auto-pull: {str(e)}")
-
- # Prepare the status update
- incoming = get_incoming_changes(self.repo, branch)
- unpushed = self._get_unpushed_changes(
- branch) if commits_ahead else []
-
- # Only lock when updating the status
- with self._lock:
- self.status.update({
- "remote_branch_exists":
- remote_branch_exists,
- "commits_behind":
- len(commits_behind),
- "commits_ahead":
- len(commits_ahead),
- "has_unpushed_commits":
- len(commits_ahead) > 0,
- "incoming_changes":
- incoming,
- "unpushed_files":
- unpushed,
- "last_remote_update":
- datetime.now().isoformat()
- })
- else:
- with self._lock:
- self.status.update({
- "remote_branch_exists":
- False,
- "commits_behind":
- 0,
- "commits_ahead":
- 0,
- "has_unpushed_commits":
- False,
- "incoming_changes": [],
- "unpushed_files": [],
- "last_remote_update":
- datetime.now().isoformat()
- })
-
- return True
- except Exception as e:
- logger.error(f"Error updating remote status: {str(e)}")
- return False
-
- def _get_unpushed_changes(self, branch):
- """Get detailed info about files modified in unpushed commits"""
- try:
- unpushed_files = self.repo.git.diff(f'origin/{branch}..{branch}',
- '--name-only').split('\n')
- unpushed_files = [f for f in unpushed_files if f]
-
- detailed_changes = []
- for file_path in unpushed_files:
- try:
- with open(os.path.join(self.repo.working_dir, file_path),
- 'r') as f:
- content = yaml.safe_load(f.read())
-
- detailed_changes.append({
- 'type':
- determine_type(file_path),
- 'name':
- content.get('name', os.path.basename(file_path)),
- 'file_path':
- file_path
- })
- except Exception as e:
- logger.warning(
- f"Could not get details for {file_path}: {str(e)}")
- detailed_changes.append({
- 'type': determine_type(file_path),
- 'name': os.path.basename(file_path),
- 'file_path': file_path
- })
-
- return detailed_changes
- except Exception as e:
- logger.error(f"Error getting unpushed changes: {str(e)}")
- return []
-
- def get_status(self):
- """Get the current status without updating"""
- with self._lock:
- return self.status.copy()
-
-
-def format_git_status(status):
- """Format git status for logging with truncation and pretty printing.
-
- Args:
- status (dict): The git status dictionary to format
-
- Returns:
- str: Formatted status string
- """
-
- def truncate_list(lst, max_items=3):
- """Truncate a list and add count of remaining items."""
- if len(lst) <= max_items:
- return lst
- return lst[:max_items] + [f"... and {len(lst) - max_items} more items"]
-
- def truncate_string(s, max_length=50):
- """Truncate a string if it's too long."""
- if not s or len(s) <= max_length:
- return s
- return s[:max_length] + "..."
-
- # Create a copy to modify
- formatted_status = status.copy()
-
- # Truncate lists
- for key in [
- 'outgoing_changes', 'merge_conflicts', 'incoming_changes',
- 'unpushed_files'
- ]:
- if key in formatted_status and isinstance(formatted_status[key], list):
- formatted_status[key] = truncate_list(formatted_status[key])
-
- # Format any nested dictionaries in the lists
- for key in formatted_status:
- if isinstance(formatted_status[key], list):
- formatted_status[key] = [{
- k: truncate_string(str(v))
- for k, v in item.items()
- } if isinstance(item, dict) else item
- for item in formatted_status[key]]
-
- # Convert to JSON with nice formatting
- formatted_json = json.dumps(formatted_status, indent=2, default=str)
-
- # Add a timestamp header
- timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
- return f"=== Git Status at {timestamp} ===\n{formatted_json}"
-
-
-def get_git_status(repo_path):
- try:
- status_manager = GitStatusManager.get_instance(repo_path)
- status_manager.update_local_status()
- success, status = True, status_manager.get_status()
-
- # Log the formatted status
- logger.info("\n" + format_git_status(status))
-
- return success, status
- except git.exc.InvalidGitRepositoryError:
- logger.info(f"No git repository found at {repo_path}")
- empty_status = {
- "branch": "",
- "outgoing_changes": [],
- "is_merging": False,
- "merge_conflicts": [],
- "has_conflicts": False,
- "remote_branch_exists": False,
- "commits_behind": 0,
- "commits_ahead": 0,
- "incoming_changes": [],
- "has_unpushed_commits": False,
- "unpushed_files": [],
- "last_local_update": None,
- "last_remote_update": None,
- "has_repo": False
- }
- return True, empty_status
- except Exception as e:
- logger.error(f"Error in get_git_status: {str(e)}", exc_info=True)
- return False, str(e)
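`GitStatusManager` is a lock-guarded singleton: the first `get_instance` call with a repository path constructs it, later calls return the same object, and the local/remote refresh methods update one shared status dict that `get_status` copies out under the lock. A minimal driving sketch, assuming the class above; the repository path is an assumption:

```python
manager = GitStatusManager.get_instance("/config/db/repo")   # first call creates the instance
assert manager is GitStatusManager.get_instance()            # later calls reuse it

manager.update_local_status()    # branch, outgoing changes, merge conflicts
manager.update_remote_status()   # fetch, ahead/behind counts, incoming changes

status = manager.get_status()    # thread-safe copy of the shared status dict
print(status["branch"], status["commits_ahead"], status["commits_behind"])
```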
diff --git a/backend/app/git/status/utils.py b/backend/app/git/status/utils.py
deleted file mode 100644
index b1554a2..0000000
--- a/backend/app/git/status/utils.py
+++ /dev/null
@@ -1,173 +0,0 @@
-# git/status/utils.py
-
-import os
-import yaml
-import logging
-import re
-
-logger = logging.getLogger(__name__)
-
-
-def extract_data_from_yaml(file_path):
- logger.debug(f"Extracting data from file: {file_path}")
- try:
- with open(file_path, 'r') as f:
- content = yaml.safe_load(f)
- logger.debug(
- f"File content: {content}") # Log the full file content
- if content is None:
- logger.error(
- f"Failed to parse YAML file or file is empty: {file_path}")
- return None
-
- # Check if expected keys are in the content
- if 'name' not in content or 'id' not in content:
- logger.warning(
- f"'name' or 'id' not found in file: {file_path}")
-
- return {'name': content.get('name'), 'id': content.get('id')}
- except Exception as e:
- logger.warning(f"Error reading file {file_path}: {str(e)}")
- return None
-
-
-def determine_type(file_path):
- if 'regex_patterns' in file_path:
- return 'Regex Pattern'
- elif 'custom_formats' in file_path:
- return 'Custom Format'
- elif 'profiles' in file_path:
- return 'Quality Profile'
- elif 'media_management' in file_path:
- return 'Media Management'
- return 'Unknown'
-
-
-def format_media_management_name(name):
- """Format media management category names for display"""
- name_mapping = {
- 'misc': 'Miscellaneous',
- 'naming': 'Naming',
- 'quality_definitions': 'Quality Definitions'
- }
- return name_mapping.get(name, name)
-
-
-def extract_name_from_path(file_path):
- """Extract and format name from file path"""
- # Remove the file extension
- name = os.path.splitext(file_path)[0]
- # Remove the type prefix (everything before the first '/')
- if '/' in name:
- name = name.split('/', 1)[1]
-
- # Format media management names
- if 'media_management' in file_path:
- return format_media_management_name(name)
-
- return name
-
-
-def interpret_git_status(x, y):
- if x == 'D' or y == 'D':
- return 'Deleted'
- elif x == 'A':
- return 'Added'
- elif x == 'M' or y == 'M':
- return 'Modified'
- elif x == 'R':
- return 'Renamed'
- elif x == 'C':
- return 'Copied'
- elif x == 'U':
- return 'Updated but unmerged'
- elif x == '?' and y == '?':
- return 'Untracked'
- else:
- return 'Unknown'
-
-
-def parse_commit_message(commit_message):
- # Default placeholders for missing parts of the commit message
- placeholders = {
- 'type': 'Unknown Type',
- 'scope': 'Unknown Scope',
- 'subject': 'No subject provided',
- 'body': 'No body provided',
- 'footer': ''
- }
-
- # Mapping of commit types and scopes to canonical forms
- type_mapping = {
- 'feat': 'New Feature',
- 'feature': 'New Feature',
- 'new': 'New Feature',
- 'fix': 'BugFix',
- 'bugfix': 'BugFix',
- 'bug': 'BugFix',
- 'docs': 'Documentation',
- 'documentation': 'Documentation',
- 'doc': 'Documentation',
- 'style': 'Style Change',
- 'formatting': 'Style Change',
- 'format': 'Style Change',
- 'lint': 'Style Change',
- 'refactor': 'Refactor',
- 'refactoring': 'Refactor',
- 'restructure': 'Refactor',
- 'redesign': 'Refactor',
- 'perf': 'Performance Improvement',
- 'performance': 'Performance Improvement',
- 'optimize': 'Performance Improvement',
- 'optimisation': 'Performance Improvement',
- 'test': 'Test',
- 'testing': 'Test',
- 'chore': 'Maintenance',
- 'maintenance': 'Maintenance',
- 'maintain': 'Maintenance'
- }
-
- scope_mapping = {
- 'regex': 'Regex Pattern',
- 'regex pattern': 'Regex Pattern',
- 'format': 'Custom Format',
- 'custom format': 'Custom Format',
- 'profile': 'Quality Profile',
- 'quality profile': 'Quality Profile'
- }
-
- # Regex patterns for each part of the commit message
- type_pattern = r'^(?P<type>feat|feature|new|fix|bugfix|bug|docs|documentation|doc|style|formatting|format|lint|refactor|refactoring|restructure|redesign|perf|performance|optimize|optimisation|test|testing|chore|maintenance|maintain)'
- scope_pattern = r'\((?P<scope>regex|regex pattern|format|custom format|profile|quality profile)\)'
- subject_pattern = r':\s(?P<subject>.+)'
- body_pattern = r'(?P<body>(?:- .+\n?)+)' # Handles multiple lines in the body
- footer_pattern = r'(?P