Compare commits

59 Commits

Author SHA1 Message Date
Jack Dallas
b4cf1e0a4f Improve filename matching for failed downloads 2022-08-11 23:42:07 +01:00
Jack Dallas
4cfdee6bc7 Start arr manager service 2022-08-11 12:52:25 +01:00
dependabot[bot]
80506f41d7 Bump terser from 5.14.0 to 5.14.2 in /web
Bumps [terser](https://github.com/terser/terser) from 5.14.0 to 5.14.2.
- [Release notes](https://github.com/terser/terser/releases)
- [Changelog](https://github.com/terser/terser/blob/master/CHANGELOG.md)
- [Commits](https://github.com/terser/terser/commits)

---
updated-dependencies:
- dependency-name: terser
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-07-22 17:16:47 +01:00
dependabot[bot]
b63e16b596 Bump svelte from 3.48.0 to 3.49.0 in /web
Bumps [svelte](https://github.com/sveltejs/svelte) from 3.48.0 to 3.49.0.
- [Release notes](https://github.com/sveltejs/svelte/releases)
- [Changelog](https://github.com/sveltejs/svelte/blob/master/CHANGELOG.md)
- [Commits](https://github.com/sveltejs/svelte/compare/v3.48.0...v3.49.0)

---
updated-dependencies:
- dependency-name: svelte
  dependency-type: direct:development
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-07-20 14:40:54 +01:00
Jack Dallas
5c06dd4200 Formalize json tags 2022-07-05 00:22:32 +01:00
Jack Dallas
2c05d8530f Add polling settings to ui 2022-07-05 00:22:32 +01:00
Jack Dallas
8c4c490db2 Add PollBlackhole option and update config version updating 2022-07-05 00:22:32 +01:00
Jack Dallas
d9bd141951 Don't full remove unzip dir on startup 2022-06-26 21:19:46 +00:00
Jack Dallas
802eeedfef Update docker labels with source 2022-06-26 21:19:46 +00:00
Jack Dallas
4f9d8299e6 Re-Work Docker build 2022-06-26 21:19:32 +00:00
Jack Dallas
6bdf60f272 README: Add user install instructions 2022-06-26 21:19:32 +00:00
Jack Dallas
742bd1e324 Create blackhole folder in docker 2022-06-26 21:19:32 +00:00
Jack Dallas
1cd862c5dc Split release and pre-release builds 2022-06-26 21:19:32 +00:00
Jack Dallas
9479ebe7fd Update README.md 2022-06-26 21:19:32 +00:00
Jack Dallas
4147817d6f Update .gitignore 2022-06-26 21:19:32 +00:00
Jack Dallas
c48259cb12 Standard error if premiumuize.me apikey is not set 2022-06-26 21:19:32 +00:00
Jack Dallas
18ce2c95e9 Increase trace logging & refactor config loading 2022-06-26 21:19:32 +00:00
Jack Dallas
ff832a5d18 Set config location before saving it 2022-06-26 21:19:32 +00:00
Jack Dallas
56d52d0b3a Refactor in to app structure, make all config options reloadable 2022-06-26 21:19:32 +00:00
Jack Dallas
3e50ba2ae1 UI: Dynamically work out webroot paths 2022-06-26 21:19:32 +00:00
Jack Dallas
898b53276a Docker: Pre-set all directory locations 2022-06-26 21:19:32 +00:00
Jack Dallas
91cf5bcfc8 API: Config Service
Config: Refactor Implementation
2022-06-26 21:19:32 +00:00
Jack Dallas
c86896e881 UI: Add config page 2022-06-26 21:19:32 +00:00
Jack Dallas
3af570479f Add build status badge 2022-06-26 21:19:32 +00:00
Jack Dallas
e6825dcb26 Update default config 2022-06-26 21:19:32 +00:00
Jack Dallas
7c06cb050b Update docker release method 2022-06-26 21:19:32 +00:00
Jack Dallas
4d610d3f59 [ci] Always setup docker 2022-06-26 21:19:32 +00:00
Jack Dallas
4bed257802 Enable pre-releases 2022-06-26 21:19:32 +00:00
Jack Dallas
07843219ef Add arm64 docker build 2022-06-26 21:19:32 +00:00
Jack Dallas
b32ae333a8 Handle web calls if when services aren't initialised 2022-06-26 21:19:32 +00:00
Jack Dallas
49a716764c Fix broken log messages 2022-06-26 21:19:32 +00:00
Jack Dallas
d6b123d7a3 Fix pointer deref 2022-06-26 21:19:32 +00:00
Jack Dallas
0a4d6923b1 Update NPM 2022-06-26 21:19:32 +00:00
Jack Dallas
da1a11dba5 UI: Support more errors 2022-06-26 21:19:32 +00:00
Jack Dallas
1e295c3608 Add environment variables for flags 2022-06-26 21:19:32 +00:00
Jack Dallas
78fc4b8b39 Rework simultaneous downloads cap 2022-06-26 21:19:32 +00:00
Jack Dallas
bdda3ca793 Update existing configs 2022-06-26 21:19:32 +00:00
Jack Dallas
d44204a8ed update docker ubuntu version & expose port 2022-06-26 21:19:32 +00:00
Jack Dallas
5fff9f9f53 Update Dockerfile (Thanks @JRDevo) 2022-06-26 21:19:32 +00:00
Jack Dallas
085d26c816 Add simultaneous downloads cap 2022-06-26 21:19:32 +00:00
dependabot[bot]
b60ef30a93 Bump async from 2.6.3 to 2.6.4 in /web
Bumps [async](https://github.com/caolan/async) from 2.6.3 to 2.6.4.
- [Release notes](https://github.com/caolan/async/releases)
- [Changelog](https://github.com/caolan/async/blob/v2.6.4/CHANGELOG.md)
- [Commits](https://github.com/caolan/async/compare/v2.6.3...v2.6.4)

---
updated-dependencies:
- dependency-name: async
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-06-26 21:19:32 +00:00
Jack Dallas
5af4083c67 Fix docker build 2022-06-26 21:19:32 +00:00
Jack Dallas
e8e93c667f Make config change a breaking update 2022-06-26 21:19:32 +00:00
dependabot[bot]
1893a1a5e4 Bump node-forge from 1.2.1 to 1.3.1 in /web
Bumps [node-forge](https://github.com/digitalbazaar/forge) from 1.2.1 to 1.3.1.
- [Release notes](https://github.com/digitalbazaar/forge/releases)
- [Changelog](https://github.com/digitalbazaar/forge/blob/main/CHANGELOG.md)
- [Commits](https://github.com/digitalbazaar/forge/compare/v1.2.1...v1.3.1)

---
updated-dependencies:
- dependency-name: node-forge
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-06-26 21:19:32 +00:00
dependabot[bot]
fdddc40699 Bump follow-redirects from 1.14.7 to 1.14.9 in /web
Bumps [follow-redirects](https://github.com/follow-redirects/follow-redirects) from 1.14.7 to 1.14.9.
- [Release notes](https://github.com/follow-redirects/follow-redirects/releases)
- [Commits](https://github.com/follow-redirects/follow-redirects/compare/v1.14.7...v1.14.9)

---
updated-dependencies:
- dependency-name: follow-redirects
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-06-26 21:19:32 +00:00
dependabot[bot]
828eb43a4c Bump minimist from 1.2.5 to 1.2.6 in /web
Bumps [minimist](https://github.com/substack/minimist) from 1.2.5 to 1.2.6.
- [Release notes](https://github.com/substack/minimist/releases)
- [Commits](https://github.com/substack/minimist/compare/1.2.5...1.2.6)

---
updated-dependencies:
- dependency-name: minimist
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-06-26 21:19:32 +00:00
Jack Dallas
969a3ac2cb Update README and docker location 2022-06-26 21:19:32 +00:00
Jack Dallas
83f1d19dfb Docker support 2022-06-26 21:19:32 +00:00
Jack Dallas
ff36423729 Make Arr's a list not locked to one of each type 2022-06-26 21:19:32 +00:00
Jack Dallas
cd0b5fba99 Make unzip directory configurable 2022-06-26 21:19:32 +00:00
Jack Dallas
df9c768066 Tweak web paths to work on url root and subpaths 2022-06-26 21:19:32 +00:00
Jack Dallas
3786e1411c Enhance logging 2022-06-26 21:19:32 +00:00
Dallas
4bf929967a Update README.md 2022-06-26 21:19:32 +00:00
Jack Dallas
43ea4903c3 Update README 2022-06-26 21:19:32 +00:00
Jack Dallas
a55eea881c Fix dpkg and perms 2022-06-26 21:19:32 +00:00
Jack Dallas
8c4c3a1b24 limit logs 2022-06-26 21:19:32 +00:00
Dallas
2db7e04604 Don't glob 2022-06-26 21:19:32 +00:00
Jack Dallas
7ec072a767 Upload artifacts 2022-06-26 21:19:32 +00:00
Jack Dallas
935813b27f fix ci 2022-06-26 21:19:32 +00:00
47 changed files with 10215 additions and 4230 deletions

View File

@@ -9,6 +9,7 @@ on:
permissions:
contents: write
packages: write
jobs:
build:
@@ -23,25 +24,67 @@ jobs:
with:
go-version: '1.17'
- name: Confirm Version
- name: go Version
run: go version
- name: Docker Version
run: docker version
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v2
# Standard Build
- name: Build
uses: goreleaser/goreleaser-action@v2
if: ! startsWith(github.ref, 'refs/tags/')
if: startsWith(github.ref, 'refs/tags/') == false
with:
distribution: goreleaser
version: latest
args: release --rm-dist --snapshot
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
DOCKER_BUILDKIT: 1
COMPOSE_DOCKER_CLI_BUILD: 1
- uses: docker/login-action@v1
if: startsWith(github.ref, 'refs/tags/')
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
# Release build
- name: Release
uses: goreleaser/goreleaser-action@v2
if: startsWith(github.ref, 'refs/tags/')
if: startsWith(github.ref, 'refs/tags/') && !contains(github.ref, '-rc')
with:
distribution: goreleaser
version: latest
args: release --rm-dist
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
DOCKER_BUILDKIT: 1
COMPOSE_DOCKER_CLI_BUILD: 1
# Pre-Release build
- name: Pre-Release
uses: goreleaser/goreleaser-action@v2
if: startsWith(github.ref, 'refs/tags/') && contains(github.ref, '-rc')
with:
distribution: goreleaser
version: latest
args: release --rm-dist -f .prerelease.goreleaser.yaml
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
DOCKER_BUILDKIT: 1
COMPOSE_DOCKER_CLI_BUILD: 1
- name: Upload assets
uses: actions/upload-artifact@v2
with:
name: artifacts
path: dist/

6
.gitignore vendored

@@ -18,5 +18,7 @@ build/
.vscode
premiumizearrd
premiumizearrd.exe
dist/
dist/
!cmd/premiumizearrd/

View File

@@ -1,3 +1,6 @@
env:
- DOCKER_BUILDKIT=1
before:
hooks:
- go mod tidy
@@ -13,9 +16,14 @@ builds:
goarch:
- amd64
- arm64
- arm
goarm:
- 7
ignore:
- goos: windows
goarch: arm64
- goos: windows
goarch: arm
archives:
- format_overrides:
@@ -31,50 +39,129 @@ archives:
- src: build/static/*
dst: static
strip_parent: true
checksum:
name_template: 'checksums.txt'
snapshot:
name_template: "{{ incpatch .Version }}-next"
changelog:
sort: asc
filters:
exclude:
- '^docs:'
- '^test:'
nfpms:
# note that this is an array of nfpm configs
-
# Name of the package.
# Defaults to `ProjectName`.
package_name: premiumizearr
# Your app's vendor.
# Default is empty.
bindir: /opt/premiumizearrd
vendor: Jack Dallas.
# Template to your app's homepage.
# Default is empty.
homepage: https://github.com/JackDallas/Premiumizearr
# Your app's maintainer (probably you).
# Default is empty.
maintainer: Dallas <jack-dallas@outlook.com>
# Template to your app's description.
# Default is empty.
description: Service to connect premiumize.me to Arr clients.
# Your app's license.
# Default is empty.
license: GPLv3
# Formats to be generated.
formats:
- deb
contents:
# Basic file that applies to all packagers
- src: build/static/*
dst: /usr/local/bin/static/
dst: /opt/premiumizearrd/static/
- src: init/premiumizearrd.service
dst: /etc/systemd/system/
dst: /etc/systemd/system/premiumizearrd.service
scripts:
postinstall: "scripts/postinstall.sh"
dockers:
-
use: buildx
goos: linux
goarch: amd64
image_templates:
- "ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-amd64"
skip_push: "false"
build_flag_templates:
- "--pull"
- "--label=org.opencontainers.image.created={{.Date}}"
- "--label=org.opencontainers.image.title={{.ProjectName}}"
- "--label=org.opencontainers.image.revision={{.FullCommit}}"
- "--label=org.opencontainers.image.version={{.Version}}"
- "--label=org.opencontainers.image.source=\"https://github.com/JackDallas/Premiumizearr\""
- "--platform=linux/amd64"
dockerfile: "docker/Dockerfile.amd64"
extra_files:
- build/static/
- docker/
-
use: buildx
goos: linux
goarch: arm64
image_templates:
- "ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-arm64"
skip_push: "false"
build_flag_templates:
- "--pull"
- "--label=org.opencontainers.image.created={{.Date}}"
- "--label=org.opencontainers.image.title={{.ProjectName}}"
- "--label=org.opencontainers.image.revision={{.FullCommit}}"
- "--label=org.opencontainers.image.version={{.Version}}"
- "--label=org.opencontainers.image.source=\"https://github.com/JackDallas/Premiumizearr\""
- "--platform=linux/arm64"
dockerfile: "docker/Dockerfile.arm64"
extra_files:
- build/static/
- docker/
-
use: buildx
goos: linux
goarch: arm
goarm: 7
image_templates:
- "ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-armv7"
skip_push: "false"
build_flag_templates:
- "--pull"
- "--label=org.opencontainers.image.created={{.Date}}"
- "--label=org.opencontainers.image.title={{.ProjectName}}"
- "--label=org.opencontainers.image.revision={{.FullCommit}}"
- "--label=org.opencontainers.image.version={{.Version}}"
- "--label=org.opencontainers.image.source=\"https://github.com/JackDallas/Premiumizearr\""
- "--platform=linux/arm/v7"
dockerfile: "docker/Dockerfile.armv7"
extra_files:
- build/static/
- docker/
docker_manifests:
# Release variants not created on rc-$i tags
- skip_push: auto
- name_template: 'ghcr.io/jackdallas/premiumizearr:latest'
image_templates:
- 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-amd64'
- 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-armv7'
- 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-arm64'
- name_template: 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}'
image_templates:
- 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-amd64'
- 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-armv7'
- 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-arm64'
- name_template: 'ghcr.io/jackdallas/premiumizearr:{{ .Major }}'
image_templates:
- 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-amd64'
- 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-armv7'
- 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-arm64'
- name_template: 'ghcr.io/jackdallas/premiumizearr:{{ .Major }}.{{ .Minor }}'
image_templates:
- 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-amd64'
- 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-armv7'
- 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-arm64'
release:
# If set to auto, will mark the release as not ready for production
# in case there is an indicator for this in the tag e.g. v1.0.0-rc1
# If set to true, will mark the release as not ready for production.
# Default is false.
prerelease: auto
footer: "**Full Changelog**: https://github.com/JackDallas/Premiumizearr/compare/{{ .PreviousTag }}...{{ .Tag }}"

147
.prerelease.goreleaser.yaml Normal file

@@ -0,0 +1,147 @@
before:
hooks:
- go mod tidy
- make web
builds:
- env:
- CGO_ENABLED=0
goos:
- linux
- windows
main: ./cmd/premiumizearrd
binary: premiumizearrd
goarch:
- amd64
- arm64
- arm
goarm:
- 7
ignore:
- goos: windows
goarch: arm64
- goos: windows
goarch: arm
archives:
- format_overrides:
- goos: windows
format: zip
wrap_in_directory: true
files:
- README.md
- LICENSE
- src: build/*.service
dst: ./
strip_parent: true
- src: build/static/*
dst: static
strip_parent: true
checksum:
name_template: 'checksums.txt'
changelog:
sort: asc
filters:
exclude:
- '^docs:'
- '^test:'
nfpms:
-
package_name: premiumizearr
bindir: /opt/premiumizearrd
vendor: Jack Dallas.
homepage: https://github.com/JackDallas/Premiumizearr
maintainer: Dallas <jack-dallas@outlook.com>
description: Service to connect premiumize.me to Arr clients.
license: GPLv3
formats:
- deb
contents:
- src: build/static/*
dst: /opt/premiumizearrd/static/
- src: init/premiumizearrd.service
dst: /etc/systemd/system/premiumizearrd.service
scripts:
postinstall: "scripts/postinstall.sh"
dockers:
-
use: docker
goos: linux
goarch: amd64
image_templates:
- "ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-amd64"
skip_push: "false"
build_flag_templates:
- "--pull"
- "--label=org.opencontainers.image.created={{.Date}}"
- "--label=org.opencontainers.image.title={{.ProjectName}}"
- "--label=org.opencontainers.image.revision={{.FullCommit}}"
- "--label=org.opencontainers.image.version={{.Version}}"
- "--label=org.opencontainers.image.source=\"https://github.com/JackDallas/Premiumizearr\""
- "--platform=linux/amd64"
dockerfile: "docker/Dockerfile.amd64"
extra_files:
- build/static/
- docker/
-
use: buildx
goos: linux
goarch: arm64
image_templates:
- "ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-arm64"
skip_push: "false"
build_flag_templates:
- "--pull"
- "--label=org.opencontainers.image.created={{.Date}}"
- "--label=org.opencontainers.image.title={{.ProjectName}}"
- "--label=org.opencontainers.image.revision={{.FullCommit}}"
- "--label=org.opencontainers.image.version={{.Version}}"
- "--label=org.opencontainers.image.source=\"https://github.com/JackDallas/Premiumizearr\""
- "--platform=linux/arm64"
dockerfile: "docker/Dockerfile.arm64"
extra_files:
- build/static/
- docker/
-
use: buildx
goos: linux
goarch: arm
goarm: 7
image_templates:
- "ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-armv7"
skip_push: "false"
build_flag_templates:
- "--pull"
- "--label=org.opencontainers.image.created={{.Date}}"
- "--label=org.opencontainers.image.title={{.ProjectName}}"
- "--label=org.opencontainers.image.revision={{.FullCommit}}"
- "--label=org.opencontainers.image.version={{.Version}}"
- "--label=org.opencontainers.image.source=\"https://github.com/JackDallas/Premiumizearr\""
- "--platform=linux/arm/v7"
dockerfile: "docker/Dockerfile.armv7"
extra_files:
- build/static/
- docker/
docker_manifests:
- skip_push: false
- name_template: 'ghcr.io/jackdallas/premiumizearr:dev'
image_templates:
- 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-amd64'
- 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-armv7'
- 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-arm64'
- name_template: 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}'
image_templates:
- 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-amd64'
- 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-armv7'
- 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-arm64'
release:
prerelease: true
header: "Premiumizearr Pre-Release {{ .Tag }}"
footer: "**Full Changelog**: https://github.com/JackDallas/Premiumizearr/compare/{{ .PreviousTag }}...{{ .Tag }}"

108
README.md

@@ -1,11 +1,109 @@
# premiumizearr
# Premiumizearr
[![Build](https://github.com/JackDallas/Premiumizearr/actions/workflows/build.yml/badge.svg)](https://github.com/JackDallas/Premiumizearr/actions/workflows/build.yml)
## Features
- Monitor blackhole directory to push `.magnet` and `.nzb` to Premiumize.me
- Monitor and download Premiumize.me transfers
- Mark transfers as failed in sonarr
- Monitor and download Premiumize.me transfers (web ui on default port 8182)
- Mark transfers as failed in Radarr & Sonarr
## Up Next
## Support the project by using my invite code
- Radarr support
[Invite Code](https://www.premiumize.me/ref/446038083)
## Install
[Grab the latest release artifact links here](https://github.com/JackDallas/Premiumizearr/releases/)
### Binary
#### System Install
```cli
wget https://github.com/JackDallas/Premiumizearr/releases/download/x.x.x/Premiumizearr_x.x.x_linux_amd64.tar.gz
tar xf Premiumizearr_x.x.x_linux_amd64.tar.gz
cd Premiumizearr_x.x.x_linux_amd64
sudo mkdir /opt/premiumizearrd/
sudo cp -r premiumizearrd static/ /opt/premiumizearrd/
sudo cp premiumizearrd.service /etc/systemd/system/
sudo systemctl daemon-reload
sudo systemctl enable premiumizearrd.service
sudo systemctl start premiumizearrd.service
```
#### User Install
```cli
wget https://github.com/JackDallas/Premiumizearr/releases/download/x.x.x/Premiumizearr_x.x.x_linux_amd64.tar.gz
tar xf Premiumizearr_x.x.x_linux_amd64.tar.gz
cd Premiumizearr_x.x.x_linux_amd64
mkdir -p ~/.local/bin/
cp -r premiumizearrd static/ ~/.local/bin/
echo 'export PATH=~/.local/bin/:$PATH' >> ~/.bashrc
source ~/.bashrc
```
You can now run the daemon from anywhere by typing `premiumizearrd`.
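For reference, the daemon also takes flags for the log level and for the config and log directories (see the `cmd/premiumizearrd/main.go` changes further down in this diff). A minimal sketch of a user-level invocation, assuming you keep `config.yaml` and the logs in `~/.config/premiumizearr/`:
```cli
mkdir -p ~/.config/premiumizearr/
premiumizearrd -log debug -config ~/.config/premiumizearr/ -logging-dir ~/.config/premiumizearr/
```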
### deb file
```cmd
wget https://github.com/JackDallas/Premiumizearr/releases/download/x.x.x/premiumizearr_x.x.x_linux_amd64.deb
sudo dpkg -i premiumizearr_x.x.x_linux_amd64.deb
```
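The nfpm config in this diff installs `premiumizearrd.service` into `/etc/systemd/system/`, so after installing the deb you can enable (if the postinstall script has not already done so) and check the service; this sketch assumes the packaged defaults:
```cli
sudo systemctl enable --now premiumizearrd.service
sudo systemctl status premiumizearrd.service
```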
### Docker
[Docker images are listed here](https://github.com/jackdallas/Premiumizearr/pkgs/container/premiumizearr)
```cmd
docker run \
-v /home/dallas/test/data:/data \
-v /home/dallas/test/blackhole:/blackhole \
-v /home/dallas/test/downloads:/downloads \
-p 8182:8182 \
ghcr.io/jackdallas/premiumizearr:latest
```
If you wish to increase logging (which you'll be asked to do if you submit an issue), you can add `-e PREMIUMIZEARR_LOG_LEVEL=trace` to the command.
> Note: The /data mount is where the `config.yaml` and log files are kept
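For example, the same run command with trace logging enabled might look like this (the host-side paths are placeholders; swap in your own directories):
```cmd
docker run \
  -e PREMIUMIZEARR_LOG_LEVEL=trace \
  -v /path/to/data:/data \
  -v /path/to/blackhole:/blackhole \
  -v /path/to/downloads:/downloads \
  -p 8182:8182 \
  ghcr.io/jackdallas/premiumizearr:latest
```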
## Setup
### Premiumizearrd
On first run the server listens on `http://0.0.0.0:8182`
If that binding is already in use, you can change the bind IP and port in `config.yaml`
> WARNING: This app exposes API keys in the UI and has no authentication. It is strongly recommended that you put it behind a reverse proxy with auth and set the bind IP to `127.0.0.1` so the app is not reachable directly from the web.
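As a sketch, the relevant `config.yaml` keys for a behind-a-proxy setup would look like this (the rest of the file stays as generated):
```yaml
# listen on localhost only; the reverse proxy is the sole way in
bindIP: 127.0.0.1
bindPort: "8182"
```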
### Sonarr/Radarr
- Go to your Arr's `Download Client` settings page
- Add a new Torrent Blackhole client: set the `Torrent Folder` to your `BlackholeDirectory` and the `Watch Folder` to your `DownloadsDirectory`
- Add a new Usenet Blackhole client: set the `Nzb Folder` to your `BlackholeDirectory` and the `Watch Folder` to your `DownloadsDirectory` (see the sketch below)
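A hypothetical `config.yaml` excerpt showing the two directories those clients point at (the paths are illustrative only):
```yaml
BlackholeDirectory: /mnt/media/blackhole    # Torrent Folder / Nzb Folder in the Arr client
DownloadsDirectory: /mnt/media/downloads    # Watch Folder in the Arr client
```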
### Reverse Proxy
Premiumizearr does not have authentication built in, so it is strongly recommended that you use a reverse proxy
#### Nginx
```nginx
location /premiumizearr/ {
proxy_pass http://127.0.0.1:8182/;
proxy_set_header Host $proxy_host;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Host $host;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_redirect off;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection $http_connection;
}
```

114
cmd/premiumizearrd/app.go Normal file

@@ -0,0 +1,114 @@
package main
import (
"path"
"time"
"github.com/jackdallas/premiumizearr/internal/config"
"github.com/jackdallas/premiumizearr/internal/service"
"github.com/jackdallas/premiumizearr/pkg/premiumizeme"
"github.com/orandin/lumberjackrus"
log "github.com/sirupsen/logrus"
)
type App struct {
config config.Config
premiumizemeClient premiumizeme.Premiumizeme
transferManager service.TransferManagerService
directoryWatcher service.DirectoryWatcherService
webServer service.WebServerService
arrsManager service.ArrsManagerService
}
// Makes go vet error - prevents copies
func (app *App) Lock() {}
func (app *App) UnLock() {}
func (app *App) Start(logLevel string, configFile string, loggingDirectory string) error {
//Setup static login
lvl, err := log.ParseLevel(logLevel)
if err != nil {
log.Errorf("Error flag not recognized, defaulting to Info!!", err)
lvl = log.InfoLevel
}
log.SetLevel(lvl)
hook, err := lumberjackrus.NewHook(
&lumberjackrus.LogFile{
Filename: path.Join(loggingDirectory, "premiumizearr.general.log"),
MaxSize: 100,
MaxBackups: 1,
MaxAge: 1,
Compress: false,
LocalTime: false,
},
log.InfoLevel,
&log.TextFormatter{},
&lumberjackrus.LogFileOpts{
log.InfoLevel: &lumberjackrus.LogFile{
Filename: path.Join(loggingDirectory, "premiumizearr.info.log"),
MaxSize: 100,
MaxBackups: 1,
MaxAge: 1,
Compress: false,
LocalTime: false,
},
log.ErrorLevel: &lumberjackrus.LogFile{
Filename: path.Join(loggingDirectory, "premiumizearr.error.log"),
MaxSize: 100, // optional
MaxBackups: 1, // optional
MaxAge: 1, // optional
Compress: false, // optional
LocalTime: false, // optional
},
},
)
if err != nil {
panic(err)
}
log.AddHook(hook)
log.Info("---------- Starting premiumizearr daemon ----------")
log.Info("")
log.Trace("Running load or create config")
log.Tracef("Reading config file location from flag or env: %s", configFile)
app.config, err = config.LoadOrCreateConfig(configFile, app.ConfigUpdatedCallback)
if err != nil {
panic(err)
}
// Initialisation
app.premiumizemeClient = premiumizeme.NewPremiumizemeClient(app.config.PremiumizemeAPIKey)
app.transferManager = service.TransferManagerService{}.New()
app.directoryWatcher = service.DirectoryWatcherService{}.New()
app.webServer = service.WebServerService{}.New()
app.arrsManager = service.ArrsManagerService{}.New()
// Initialise Services
app.arrsManager.Init(&app.config)
app.directoryWatcher.Init(&app.premiumizemeClient, &app.config)
// Must come after arrsManager
app.transferManager.Init(&app.premiumizemeClient, &app.arrsManager, &app.config)
// Must come after transfer, arrManager and directory
app.webServer.Init(&app.transferManager, &app.directoryWatcher, &app.arrsManager, &app.config)
app.arrsManager.Start()
app.webServer.Start()
app.directoryWatcher.Start()
//Block until the program is terminated
app.transferManager.Run(15 * time.Second)
return nil
}
func (app *App) ConfigUpdatedCallback(currentConfig config.Config, newConfig config.Config) {
app.transferManager.ConfigUpdatedCallback(currentConfig, newConfig)
app.directoryWatcher.ConfigUpdatedCallback(currentConfig, newConfig)
app.webServer.ConfigUpdatedCallback(currentConfig, newConfig)
app.arrsManager.ConfigUpdatedCallback(currentConfig, newConfig)
}

View File

@@ -1,90 +1,24 @@
package main
import (
"flag"
"io"
"os"
"time"
"github.com/jackdallas/premiumizearr/internal/arr"
"github.com/jackdallas/premiumizearr/internal/config"
"github.com/jackdallas/premiumizearr/internal/service"
"github.com/jackdallas/premiumizearr/internal/web_service"
"github.com/jackdallas/premiumizearr/pkg/premiumizeme"
log "github.com/sirupsen/logrus"
"golift.io/starr"
"golift.io/starr/radarr"
"golift.io/starr/sonarr"
)
func main() {
//Flags
var logLevel string
var configFile string
//Parse flags
flag.StringVar(&logLevel, "log", "info", "Logging level: \n \tinfo,debug,trace")
flag.StringVar(&configFile, "config", "", "Config file path")
flag.Parse()
lvl, err := log.ParseLevel(logLevel)
if err != nil {
log.Errorf("Error flag not recognized, defaulting to Info!!", err)
lvl = log.InfoLevel
}
log.SetLevel(lvl)
logFile, err := os.OpenFile("premiumizearr.log", os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
if err != nil {
log.Error(err)
} else {
log.SetOutput(io.MultiWriter(logFile, os.Stdout))
}
log.Info("")
log.Info("---------- Starting premiumizearr daemon ----------")
log.Info("")
config, err := config.LoadOrCreateConfig(configFile)
if err != nil {
panic(err)
}
if config.PremiumizemeAPIKey == "" {
panic("premiumizearr API Key is empty")
}
// Initialisation
premiumizearr_client := premiumizeme.NewPremiumizemeClient(config.PremiumizemeAPIKey)
starr_config_sonarr := starr.New(config.SonarrAPIKey, config.SonarrURL, 0)
starr_config_radarr := starr.New(config.RadarrAPIKey, config.RadarrURL, 0)
sonarr_wrapper := arr.SonarrArr{
Client: sonarr.New(starr_config_sonarr),
History: nil,
LastUpdate: time.Now(),
}
radarr_wrapper := arr.RadarrArr{
Client: radarr.New(starr_config_radarr),
History: nil,
LastUpdate: time.Now(),
}
arrs := []arr.IArr{
&sonarr_wrapper,
&radarr_wrapper,
}
transfer_manager := service.NewTransferManagerService(premiumizearr_client, &arrs, &config)
directory_watcher := service.NewDirectoryWatcherService(premiumizearr_client, &config)
go directory_watcher.Watch()
go web_service.StartWebServer(&transfer_manager, &directory_watcher, &config)
//Block until the program is terminated
transfer_manager.Run(15 * time.Second)
}
package main
import (
"flag"
"github.com/jackdallas/premiumizearr/internal/utils"
)
func main() {
//Flags
var logLevel string
var configFile string
var loggingDirectory string
//Parse flags
flag.StringVar(&logLevel, "log", utils.EnvOrDefault("PREMIUMIZEARR_LOG_LEVEL", "info"), "Logging level: \n \tinfo,debug,trace")
flag.StringVar(&configFile, "config", utils.EnvOrDefault("PREMIUMIZEARR_CONFIG_DIR_PATH", "./"), "The directory the config.yml is located in")
flag.StringVar(&loggingDirectory, "logging-dir", utils.EnvOrDefault("PREMIUMIZEARR_LOGGING_DIR_PATH", "./"), "The directory logs are to be written to")
flag.Parse()
App := &App{}
App.Start(logLevel, configFile, loggingDirectory)
}

17
config.yaml Normal file

@@ -0,0 +1,17 @@
PremiumizemeAPIKey: xxxxxxxxx
Arrs:
- Name: ""
URL: http://localhost:8989
APIKey: xxxxxxxxx
Type: Sonarr
- Name: ""
URL: http://localhost:7878
APIKey: xxxxxxxxx
Type: Radarr
BlackholeDirectory: ""
DownloadsDirectory: ""
UnzipDirectory: ""
bindIP: 0.0.0.0
bindPort: "8182"
WebRoot: ""
SimultaneousDownloads: 5

View File

@@ -1,9 +0,0 @@
PremiumizemeAPIKey: ""
SonarrURL: http://localhost:8989
SonarrAPIKey: ""
RadarrURL: http://localhost:7878
RadarrAPIKey: ""
BlackholeDirectory: ""
DownloadsDirectory: ""
bindIP: 0.0.0.0
bindPort: "8182"

5
docker/Dockerfile.amd64 Normal file

@@ -0,0 +1,5 @@
# syntax=edrevo/dockerfile-plus
FROM ghcr.io/linuxserver/baseimage-alpine:3.16-f525477c-ls6@sha256:c25011f564093f523b1a793658d19275d9eac5a7f21aa5d00ce6cdff29c2a8c1
INCLUDE+ docker/Dockerfile.common

5
docker/Dockerfile.arm64 Normal file

@@ -0,0 +1,5 @@
# syntax=edrevo/dockerfile-plus
FROM ghcr.io/linuxserver/baseimage-alpine:3.16-f525477c-ls6@sha256:611bc4a5a75132914dba740dffa4adcea5039fbe67e3704afd5731a55bf8c82f
INCLUDE+ docker/Dockerfile.common

5
docker/Dockerfile.armv7 Normal file

@@ -0,0 +1,5 @@
# syntax=edrevo/dockerfile-plus
FROM ghcr.io/linuxserver/baseimage-alpine:3.16-f525477c-ls6@sha256:a31127cd9764c95d6137764a1854402d3a33ee085edd139e08726e2fc98d2254
INCLUDE+ docker/Dockerfile.common

23
docker/Dockerfile.common Normal file

@@ -0,0 +1,23 @@
LABEL build_version="Premiumizearr version:- ${VERSION} Build-date:- ${BUILD_DATE}"
LABEL maintainer="JackDallas"
COPY docker/root/ /
EXPOSE 8182
RUN mkdir /data
RUN mkdir /unzip
RUN mkdir /downloads
RUN mkdir /transfers
RUN mkdir /blackhole
RUN mkdir -p /opt/app/
WORKDIR /opt/app/
ENV PREMIUMIZEARR_CONFIG_DIR_PATH=/data
ENV PREMIUMIZEARR_LOGGING_DIR_PATH=/data
COPY premiumizearrd /opt/app/
COPY build/static /opt/app/static
ENTRYPOINT ["/init"]

View File

@@ -0,0 +1,10 @@
#!/usr/bin/with-contenv bash
# permissions
chown -R abc:abc \
/data \
/unzip \
/downloads \
/transfers \
/blackhole \
/opt \

View File

@@ -0,0 +1,6 @@
#!/usr/bin/with-contenv bash
cd /opt/app/ || exit
exec \
s6-setuidgid abc /opt/app/premiumizearrd

3
go.mod

@@ -6,12 +6,15 @@ require (
github.com/dustin/go-humanize v1.0.0
github.com/fsnotify/fsnotify v1.5.1
github.com/gorilla/mux v1.8.0
github.com/orandin/lumberjackrus v1.0.1
github.com/sirupsen/logrus v1.8.1
golift.io/starr v0.13.0
gopkg.in/yaml.v2 v2.4.0
)
require (
github.com/BurntSushi/toml v1.0.0 // indirect
golang.org/x/net v0.0.0-20220114011407-0dd24b26b47d // indirect
golang.org/x/sys v0.0.0-20211110154304-99a53858aa08 // indirect
gopkg.in/natefinch/lumberjack.v2 v2.0.0 // indirect
)

6
go.sum

@@ -1,3 +1,5 @@
github.com/BurntSushi/toml v1.0.0 h1:dtDWrepsVPfW9H/4y7dDgFc2MBUSeJhlaDtK13CxFlU=
github.com/BurntSushi/toml v1.0.0/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/dustin/go-humanize v1.0.0 h1:VSnTsYCnlFHaM2/igO1h6X3HA71jcobQuxemgkq4zYo=
@@ -7,6 +9,8 @@ github.com/fsnotify/fsnotify v1.5.1/go.mod h1:T3375wBYaZdLLcVNkcVbzGHY7f1l/uK5T5
github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs=
github.com/gorilla/mux v1.8.0 h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI=
github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So=
github.com/orandin/lumberjackrus v1.0.1 h1:7ysDQ0MHD79zIFN9/EiDHjUcgopNi5ehtxFDy8rUkWo=
github.com/orandin/lumberjackrus v1.0.1/go.mod h1:xYLt6H8W93pKnQgUQaxsApS0Eb4BwHLOkxk5DVzf5H0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/sirupsen/logrus v1.8.1 h1:dJKuHgqk1NNQlqoA6BTlM1Wf9DOH3NBjQyu0h9+AZZE=
@@ -48,5 +52,7 @@ golift.io/starr v0.13.0 h1:LoihBAH3DQ0ikPNHTVg47tUU+475mzbr1ahMcY5gdno=
golift.io/starr v0.13.0/go.mod h1:IZIzdT5/NBdhM08xAEO5R1INgGN+Nyp4vCwvgHrbKVs=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/natefinch/lumberjack.v2 v2.0.0 h1:1Lc07Kr7qY4U2YPouBjpCLxpiyxIVoxqXgkXLknAOE8=
gopkg.in/natefinch/lumberjack.v2 v2.0.0/go.mod h1:l0ndWWf7gzL7RNwBG7wST/UCcT4T24xpD6X8LsfU/+k=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=

View File

@@ -2,8 +2,8 @@
Description=Premiumizearr Daemon
After=network.target
[Service]
User=$USER
Group=$USER
User=1000
Group=1000
UMask=0002
Type=simple
ExecStart=/opt/premiumizearrd/premiumizearrd

View File

@@ -18,7 +18,7 @@ import (
//Data Access
//GetHistory: Updates the history if it's been more than 15 seconds since last update
// GetHistory: Updates the history if it's been more than 15 seconds since last update
func (arr *RadarrArr) GetHistory() (radarr.History, error) {
arr.LastUpdateMutex.Lock()
defer arr.LastUpdateMutex.Unlock()
@@ -78,20 +78,20 @@ func (arr *RadarrArr) GetArrName() string {
//Functions
func (arr *RadarrArr) HistoryContains(name string) (int64, bool) {
log.Tracef("Radarr.HistoryContains(): Checking history for %s", name)
log.Tracef("Radarr [%s]: Checking history for %s", arr.Name, name)
his, err := arr.GetHistory()
if err != nil {
log.Errorf("Radarr.HistoryContains(): Failed to get history: %+v", err)
log.Errorf("Radarr [%s]: Failed to get history: %+v", arr.Name, err)
return -1, false
}
log.Trace("Radarr.HistoryContains(): Got History, now Locking History")
log.Trace("Radarr [%s]: Got History, now Locking History", arr.Name)
arr.HistoryMutex.Lock()
defer arr.HistoryMutex.Unlock()
name = utils.StripDownloadTypesExtention(name)
// name = strings.ReplaceAll(name, ".", " ")
for _, item := range his.Records {
if item.SourceTitle == name {
if utils.StripDownloadTypesExtention(item.SourceTitle) == name || item.SourceTitle == name {
return item.ID, true
}
}

View File

@@ -18,7 +18,7 @@ import (
//Data Access
//GetHistory: Updates the history if it's been more than 15 seconds since last update
// GetHistory: Updates the history if it's been more than 15 seconds since last update
func (arr *SonarrArr) GetHistory() (sonarr.History, error) {
arr.LastUpdateMutex.Lock()
defer arr.LastUpdateMutex.Unlock()
@@ -77,22 +77,22 @@ func (arr *SonarrArr) GetArrName() string {
// Functions
func (arr *SonarrArr) HistoryContains(name string) (int64, bool) {
log.Tracef("Sonarr.HistoryContains(): Checking history for %s", name)
log.Tracef("Sonarr [%s]: Checking history for %s", arr.Name, name)
his, err := arr.GetHistory()
if err != nil {
return 0, false
}
log.Trace("Sonarr.HistoryContains(): Got History, now Locking History")
log.Trace("Sonarr [%s]: Got History, now Locking History")
arr.HistoryMutex.Lock()
defer arr.HistoryMutex.Unlock()
name = utils.StripDownloadTypesExtention(name)
for _, item := range his.Records {
if utils.StripDownloadTypesExtention(item.SourceTitle) == name {
if utils.StripDownloadTypesExtention(item.SourceTitle) == name || item.SourceTitle == name {
return item.ID, true
}
}
log.Tracef("Sonarr.HistoryContains(): %s Not in History", name)
log.Tracef("Sonarr [%s]: %s Not in History", name)
return -1, false
}

View File

@@ -9,7 +9,6 @@ import (
"golift.io/starr/sonarr"
)
//TODO: Thread Safe version
type IArr interface {
HistoryContains(string) (int64, bool)
MarkHistoryItemAsFailed(int64) error
@@ -18,6 +17,7 @@ type IArr interface {
}
type SonarrArr struct {
Name string
ClientMutex sync.Mutex
Client *sonarr.Sonarr
HistoryMutex sync.Mutex
@@ -29,6 +29,7 @@ type SonarrArr struct {
}
type RadarrArr struct {
Name string
ClientMutex sync.Mutex
Client *radarr.Radarr
HistoryMutex sync.Mutex

View File

@@ -3,99 +3,211 @@ package config
import (
"errors"
"io/ioutil"
"log"
"strings"
"github.com/jackdallas/premiumizearr/internal/utils"
log "github.com/sirupsen/logrus"
"os"
"path"
"gopkg.in/yaml.v2"
)
var (
ErrInvalidConfigFile = errors.New("invalid Config File")
ErrFailedToFindConfigFile = errors.New("failed to find config file")
)
type Config struct {
PremiumizemeAPIKey string `yaml:"PremiumizemeAPIKey"`
SonarrURL string `yaml:"SonarrURL"`
SonarrAPIKey string `yaml:"SonarrAPIKey"`
RadarrURL string `yaml:"RadarrURL"`
RadarrAPIKey string `yaml:"RadarrAPIKey"`
BlackholeDirectory string `yaml:"BlackholeDirectory"`
DownloadsDirectory string `yaml:"DownloadsDirectory"`
BindIP string `yaml:"bindIP"`
BindPort string `yaml:"bindPort"`
}
func loadConfigFromDisk() (Config, error) {
var config Config
file, err := ioutil.ReadFile("config.yaml")
// LoadOrCreateConfig - Loads the config from disk or creates a new one
func LoadOrCreateConfig(altConfigLocation string, _appCallback AppCallback) (Config, error) {
config, err := loadConfigFromDisk(altConfigLocation)
if err != nil {
return config, ErrFailedToFindConfigFile
if err == ErrFailedToFindConfigFile {
log.Warn("No config file found, created default config file")
config = defaultConfig()
}
if err == ErrInvalidConfigFile || err == ErrFailedToSaveConfig {
return config, err
}
}
err = yaml.Unmarshal(file, &config)
if err != nil {
return config, ErrInvalidConfigFile
// Override unzip directory if running in docker
if utils.IsRunningInDockerContainer() {
log.Info("Running in docker, overriding unzip directory!")
config.UnzipDirectory = "/unzip"
// Override config data directories if blank
if config.BlackholeDirectory == "" {
log.Trace("Running in docker, overriding blank directory settings for blackhole directory")
config.BlackholeDirectory = "/blackhole"
}
if config.DownloadsDirectory == "" {
log.Trace("Running in docker, overriding blank directory settings for downloads directory")
config.DownloadsDirectory = "/downloads"
}
}
log.Tracef("Setting config location to %s", altConfigLocation)
config.appCallback = _appCallback
config.altConfigLocation = altConfigLocation
config.Save()
return config, nil
}
func createDefaultConfig() error {
config := Config{
PremiumizemeAPIKey: "",
SonarrURL: "http://localhost:8989",
SonarrAPIKey: "",
RadarrURL: "http://localhost:7878",
RadarrAPIKey: "",
BlackholeDirectory: "",
DownloadsDirectory: "",
BindIP: "0.0.0.0",
BindPort: "8182",
}
file, err := yaml.Marshal(config)
// Save - Saves the config to disk
func (c *Config) Save() error {
log.Trace("Marshaling & saving config")
data, err := yaml.Marshal(*c)
if err != nil {
log.Error(err)
return err
}
err = ioutil.WriteFile("config.yaml", file, 0644)
savePath := "./config.yaml"
if c.altConfigLocation != "" {
savePath = path.Join(c.altConfigLocation, "config.yaml")
}
log.Tracef("Writing config to %s", savePath)
err = ioutil.WriteFile(savePath, data, 0644)
if err != nil {
log.Errorf("Failed to save config file: %+v", err)
return err
}
log.Trace("Config saved")
return nil
}
func LoadOrCreateConfig(altConfigLocation string) (Config, error) {
if altConfigLocation != "" {
if _, err := ioutil.ReadFile(altConfigLocation); err != nil {
log.Panicf("Failed to find config file at %s Error: %+v", altConfigLocation, err)
}
}
func loadConfigFromDisk(altConfigLocation string) (Config, error) {
var config Config
log.Trace("Trying to load config from disk")
configLocation := path.Join(altConfigLocation, "config.yaml")
log.Tracef("Reading config from %s", configLocation)
file, err := ioutil.ReadFile(configLocation)
config, err := loadConfigFromDisk()
if err != nil {
if err == ErrFailedToFindConfigFile {
err = createDefaultConfig()
if err != nil {
return config, err
}
panic("Default config created, please fill it out")
}
if err == ErrInvalidConfigFile {
return config, ErrInvalidConfigFile
}
}
//Clean up url
if strings.HasSuffix(config.SonarrURL, ("/")) {
config.SonarrURL = config.SonarrURL[:len(config.SonarrURL)-1]
log.Trace("Failed to find config file")
return config, ErrFailedToFindConfigFile
}
log.Trace("Loading to interface")
var configInterface map[interface{}]interface{}
err = yaml.Unmarshal(file, &configInterface)
if err != nil {
log.Errorf("Failed to unmarshal config file: %+v", err)
return config, ErrInvalidConfigFile
}
log.Trace("Unmarshalling to struct")
err = yaml.Unmarshal(file, &config)
if err != nil {
log.Errorf("Failed to unmarshal config file: %+v", err)
return config, ErrInvalidConfigFile
}
log.Trace("Checking for missing config fields")
updated := false
if configInterface["PollBlackholeDirectory"] == nil {
log.Info("PollBlackholeDirectory not set, setting to false")
config.PollBlackholeDirectory = false
updated = true
}
if configInterface["SimultaneousDownloads"] == nil {
log.Info("SimultaneousDownloads not set, setting to 5")
config.SimultaneousDownloads = 5
updated = true
}
if configInterface["PollBlackholeIntervalMinutes"] == nil {
log.Info("PollBlackholeIntervalMinutes not set, setting to 10")
config.PollBlackholeIntervalMinutes = 10
updated = true
}
config.altConfigLocation = altConfigLocation
if updated {
log.Trace("Version updated saving")
err = config.Save()
if err == nil {
log.Trace("Config saved")
return config, nil
} else {
log.Errorf("Failed to save config to %s", configLocation)
log.Error(err)
return config, ErrFailedToSaveConfig
}
}
log.Trace("Config loaded")
return config, nil
}
func defaultConfig() Config {
return Config{
PremiumizemeAPIKey: "xxxxxxxxx",
Arrs: []ArrConfig{
{Name: "Sonarr", URL: "http://localhost:8989", APIKey: "xxxxxxxxx", Type: Sonarr},
{Name: "Radarr", URL: "http://localhost:7878", APIKey: "xxxxxxxxx", Type: Radarr},
},
BlackholeDirectory: "",
PollBlackholeDirectory: false,
PollBlackholeIntervalMinutes: 10,
DownloadsDirectory: "",
UnzipDirectory: "",
BindIP: "0.0.0.0",
BindPort: "8182",
WebRoot: "",
SimultaneousDownloads: 5,
}
}
var (
ErrUnzipDirectorySetToRoot = errors.New("unzip directory set to root")
ErrUnzipDirectoryNotWriteable = errors.New("unzip directory not writeable")
)
func (c *Config) GetUnzipBaseLocation() (string, error) {
if c.UnzipDirectory == "" {
log.Tracef("Unzip directory not set, using default: %s", os.TempDir())
return path.Join(os.TempDir(), "premiumizearrd"), nil
}
if c.UnzipDirectory == "/" || c.UnzipDirectory == "\\" || c.UnzipDirectory == "C:\\" {
log.Error("Unzip directory set to root, please set a directory")
return "", ErrUnzipDirectorySetToRoot
}
if !utils.IsDirectoryWriteable(c.UnzipDirectory) {
log.Errorf("Unzip directory not writeable: %s", c.UnzipDirectory)
return c.UnzipDirectory, ErrUnzipDirectoryNotWriteable
}
log.Tracef("Unzip directory set to: %s", c.UnzipDirectory)
return c.UnzipDirectory, nil
}
func (c *Config) GetNewUnzipLocation() (string, error) {
// Create temp dir in os temp location or unzip-directory
tempDir, err := c.GetUnzipBaseLocation()
if err != nil {
return "", err
}
log.Trace("Creating unzip directory")
err = os.MkdirAll(tempDir, os.ModePerm)
if err != nil {
return "", err
}
log.Trace("Creating generated unzip directory")
dir, err := ioutil.TempDir(tempDir, "unzip-")
if err != nil {
return "", err
}
return dir, nil
}

View File

@@ -0,0 +1,13 @@
package config
func (c *Config) UpdateConfig(_newConfig Config) {
oldConfig := *c
//move private fields over
_newConfig.appCallback = c.appCallback
_newConfig.altConfigLocation = c.altConfigLocation
*c = _newConfig
c.appCallback(oldConfig, *c)
c.Save()
}

52
internal/config/types.go Normal file

@@ -0,0 +1,52 @@
package config
import "errors"
var (
ErrInvalidConfigFile = errors.New("invalid Config File")
ErrFailedToFindConfigFile = errors.New("failed to find config file")
ErrFailedToSaveConfig = errors.New("failed to save config")
)
//ArrType enum for Sonarr/Radarr
type ArrType string
//AppCallback - Callback for the app to use
type AppCallback func(oldConfig Config, newConfig Config)
const (
Sonarr ArrType = "Sonarr"
Radarr ArrType = "Radarr"
)
type ArrConfig struct {
Name string `yaml:"Name" json:"Name"`
URL string `yaml:"URL" json:"URL"`
APIKey string `yaml:"APIKey" json:"APIKey"`
Type ArrType `yaml:"Type" json:"Type"`
}
type Config struct {
altConfigLocation string
appCallback AppCallback
//PremiumizemeAPIKey string with yaml and json tag
PremiumizemeAPIKey string `yaml:"PremiumizemeAPIKey" json:"PremiumizemeAPIKey"`
Arrs []ArrConfig `yaml:"Arrs" json:"Arrs"`
BlackholeDirectory string `yaml:"BlackholeDirectory" json:"BlackholeDirectory"`
PollBlackholeDirectory bool `yaml:"PollBlackholeDirectory" json:"PollBlackholeDirectory"`
PollBlackholeIntervalMinutes int `yaml:"PollBlackholeIntervalMinutes" json:"PollBlackholeIntervalMinutes"`
DownloadsDirectory string `yaml:"DownloadsDirectory" json:"DownloadsDirectory"`
UnzipDirectory string `yaml:"UnzipDirectory" json:"UnzipDirectory"`
BindIP string `yaml:"bindIP" json:"BindIP"`
BindPort string `yaml:"bindPort" json:"BindPort"`
WebRoot string `yaml:"WebRoot" json:"WebRoot"`
SimultaneousDownloads int `yaml:"SimultaneousDownloads" json:"SimultaneousDownloads"`
}

View File

@@ -19,17 +19,16 @@ func NewDirectoryWatcher(path string, recursive bool, matchFunction func(string)
}
func (w *WatchDirectory) Watch() error {
watcher, err := fsnotify.NewWatcher()
var err error
w.Watcher, err = fsnotify.NewWatcher()
if err != nil {
return err
}
defer watcher.Close()
done := make(chan bool)
go func() {
for {
select {
case event, ok := <-watcher.Events:
case event, ok := <-w.Watcher.Events:
if !ok {
return
}
@@ -38,7 +37,7 @@ func (w *WatchDirectory) Watch() error {
w.CallbackFunction(event.Name)
}
}
case _, ok := <-watcher.Errors:
case _, ok := <-w.Watcher.Errors:
if !ok {
return
}
@@ -52,10 +51,20 @@ func (w *WatchDirectory) Watch() error {
return err
}
err = watcher.Add(cleanPath)
err = w.Watcher.Add(cleanPath)
if err != nil {
return err
}
<-done
return nil
}
func (w *WatchDirectory) UpdatePath(path string) error {
w.Watcher.Remove(w.Path)
w.Path = path
return w.Watcher.Add(w.Path)
}
func (w *WatchDirectory) Stop() error {
return w.Watcher.Close()
}

View File

@@ -1,5 +1,7 @@
package directory_watcher
import "github.com/fsnotify/fsnotify"
// WatchDirectory watches a directory for changes.
type WatchDirectory struct {
// Path is the path to the directory to watch.
@@ -12,4 +14,6 @@ type WatchDirectory struct {
MatchFunction func(string) bool
// Callback is the function to call when a file is created that matches with MatchFunction.
CallbackFunction func(string)
// watcher is the fsnotify watcher.
Watcher *fsnotify.Watcher
}

View File

@@ -0,0 +1,96 @@
package service
import (
"time"
"github.com/jackdallas/premiumizearr/internal/arr"
"github.com/jackdallas/premiumizearr/internal/config"
log "github.com/sirupsen/logrus"
"golift.io/starr"
"golift.io/starr/radarr"
"golift.io/starr/sonarr"
)
type ArrsManagerService struct {
arrs []arr.IArr
config *config.Config
}
func (am ArrsManagerService) New() ArrsManagerService {
am.arrs = []arr.IArr{}
return am
}
func (am *ArrsManagerService) Init(_config *config.Config) {
am.config = _config
}
func (am *ArrsManagerService) Start() {
am.arrs = []arr.IArr{}
log.Debugf("Starting ArrsManagerService")
for _, arr_config := range am.config.Arrs {
switch arr_config.Type {
case config.Sonarr:
c := starr.New(arr_config.APIKey, arr_config.URL, 0)
wrapper := arr.SonarrArr{
Name: arr_config.Name,
Client: sonarr.New(c),
History: nil,
LastUpdate: time.Now(),
}
am.arrs = append(am.arrs, &wrapper)
log.Tracef("Added Sonarr arr: %s", arr_config.Name)
case config.Radarr:
c := starr.New(arr_config.APIKey, arr_config.URL, 0)
wrapper := arr.RadarrArr{
Name: arr_config.Name,
Client: radarr.New(c),
History: nil,
LastUpdate: time.Now(),
}
am.arrs = append(am.arrs, &wrapper)
log.Tracef("Added Radarr arr: %s", arr_config.Name)
default:
log.Error("Unknown arr type: %s, not adding Arr %s", arr_config.Type, arr_config.Name)
}
}
log.Debugf("Created %d Arrs", len(am.arrs))
}
func (am *ArrsManagerService) Stop() {
//noop
}
func (am *ArrsManagerService) ConfigUpdatedCallback(currentConfig config.Config, newConfig config.Config) {
if len(currentConfig.Arrs) != len(newConfig.Arrs) {
am.Start()
return
}
for i, arr_config := range newConfig.Arrs {
if currentConfig.Arrs[i].Type != arr_config.Type ||
currentConfig.Arrs[i].APIKey != arr_config.APIKey ||
currentConfig.Arrs[i].URL != arr_config.URL {
am.Start()
return
}
}
}
func (am *ArrsManagerService) GetArrs() []arr.IArr {
return am.arrs
}
func TestArrConnection(arr config.ArrConfig) error {
c := starr.New(arr.APIKey, arr.URL, 0)
switch arr.Type {
case config.Sonarr:
_, err := sonarr.New(c).GetSystemStatus()
return err
case config.Radarr:
_, err := radarr.New(c).GetSystemStatus()
return err
default:
return nil
}
}

View File

@@ -21,6 +21,7 @@ type DirectoryWatcherService struct {
Queue *stringqueue.StringQueue
status string
downloadsFolderID string
watchDirectory *directory_watcher.WatchDirectory
}
const (
@@ -28,11 +29,32 @@ const (
ERROR_ALREADY_UPLOADED = "You already added this job."
)
func NewDirectoryWatcherService(pm *premiumizeme.Premiumizeme, con *config.Config) DirectoryWatcherService {
func (DirectoryWatcherService) New() DirectoryWatcherService {
return DirectoryWatcherService{
premiumizemeClient: pm,
config: con,
premiumizemeClient: nil,
config: nil,
Queue: nil,
status: "",
downloadsFolderID: "",
}
}
func (dw *DirectoryWatcherService) Init(premiumizemeClient *premiumizeme.Premiumizeme, config *config.Config) {
dw.premiumizemeClient = premiumizemeClient
dw.config = config
}
func (dw *DirectoryWatcherService) ConfigUpdatedCallback(currentConfig config.Config, newConfig config.Config) {
if currentConfig.BlackholeDirectory != newConfig.BlackholeDirectory {
log.Info("Blackhole directory changed, restarting directory watcher...")
log.Info("Running initial directory scan...")
go dw.directoryScan(dw.config.BlackholeDirectory)
dw.watchDirectory.UpdatePath(newConfig.BlackholeDirectory)
}
if currentConfig.PollBlackholeDirectory != newConfig.PollBlackholeDirectory {
log.Info("Poll blackhole directory changed, restarting directory watcher...")
dw.Start()
}
}
@@ -40,45 +62,60 @@ func (dw *DirectoryWatcherService) GetStatus() string {
return dw.status
}
//TODO (Radarr): accept paths as a parameter, support multiple paths
//Watch: This is the entrypoint for the directory watcher
func (dw *DirectoryWatcherService) Watch() {
//Start: This is the entrypoint for the directory watcher
func (dw *DirectoryWatcherService) Start() {
log.Info("Starting directory watcher...")
dw.downloadsFolderID = utils.GetDownloadsFolderIDFromPremiumizeme(dw.premiumizemeClient)
log.Info("Clearing tmp directory...")
tempDir := utils.GetTempBaseDir()
err := os.RemoveAll(tempDir)
if err != nil {
log.Errorf("Error clearing tmp directory %s", tempDir)
}
os.Mkdir(tempDir, os.ModePerm)
log.Info("Creating Queue...")
dw.Queue = stringqueue.NewStringQueue()
log.Info("Starting uploads processor...")
go dw.processUploads()
log.Info("Starting initial directory scans...")
go dw.initialDirectoryScan(dw.config.BlackholeDirectory)
log.Info("Running initial directory scan...")
go dw.directoryScan(dw.config.BlackholeDirectory)
// Build and start a DirectoryWatcher
watcher := directory_watcher.NewDirectoryWatcher(dw.config.BlackholeDirectory,
false,
dw.checkFile,
dw.addFileToQueue,
)
if dw.watchDirectory != nil {
log.Info("Stopping directory watcher...")
err := dw.watchDirectory.Stop()
if err != nil {
log.Errorf("Error stopping directory watcher: %s", err)
}
}
watcher.Watch()
if dw.config.PollBlackholeDirectory {
log.Info("Starting directory poller...")
go func() {
for {
if !dw.config.PollBlackholeDirectory {
log.Info("Directory poller stopped")
break
}
time.Sleep(time.Duration(dw.config.PollBlackholeIntervalMinutes) * time.Minute)
log.Infof("Running directory scan of %s", dw.config.BlackholeDirectory)
dw.directoryScan(dw.config.BlackholeDirectory)
log.Infof("Scan complete, next scan in %d minutes", dw.config.PollBlackholeIntervalMinutes)
}
}()
} else {
log.Info("Starting directory watcher...")
dw.watchDirectory = directory_watcher.NewDirectoryWatcher(dw.config.BlackholeDirectory,
false,
dw.checkFile,
dw.addFileToQueue,
)
dw.watchDirectory.Watch()
}
}
func (dw *DirectoryWatcherService) initialDirectoryScan(p string) {
log.Trace("Initial directory scan")
func (dw *DirectoryWatcherService) directoryScan(p string) {
log.Trace("Running directory scan")
files, err := ioutil.ReadDir(p)
if err != nil {
log.Errorf("Error with initial directory scan %+v", err)
log.Errorf("Error with directory scan %+v", err)
return
}
for _, file := range files {
@@ -101,7 +138,7 @@ func (dw *DirectoryWatcherService) checkFile(path string) bool {
}
if fi.IsDir() {
log.Errorf("Directory created in blackhole %s ignoring (Warning premiumizearrzed does not look in subfolders!)", path)
log.Errorf("Directory created in blackhole %s ignoring (Warning premiumizearrd does not look in subfolders!)", path)
return false
}
@@ -119,7 +156,6 @@ func (dw *DirectoryWatcherService) addFileToQueue(path string) {
}
func (dw *DirectoryWatcherService) processUploads() {
//TODO: Global running state
for {
if dw.Queue.Len() < 1 {
log.Trace("No files in Queue, sleeping for 10 seconds")
@@ -146,7 +182,7 @@ func (dw *DirectoryWatcherService) processUploads() {
log.Trace("File already uploaded, removing from Disk")
os.Remove(filePath)
default:
log.Error(err)
log.Error("Error creating transfer: %s", err)
}
} else {
dw.status = "Okay"

View File

@@ -0,0 +1,12 @@
package service
import (
"github.com/jackdallas/premiumizearr/internal/config"
)
//Service interface
type Service interface {
New() (*config.Config, error)
Start() error
Stop() error
}

View File

@@ -7,7 +7,6 @@ import (
"sync"
"time"
"github.com/jackdallas/premiumizearr/internal/arr"
"github.com/jackdallas/premiumizearr/internal/config"
"github.com/jackdallas/premiumizearr/internal/progress_downloader"
"github.com/jackdallas/premiumizearr/internal/utils"
@@ -23,7 +22,7 @@ type DownloadDetails struct {
type TransferManagerService struct {
premiumizemeClient *premiumizeme.Premiumizeme
arrs *[]arr.IArr
arrsManager *ArrsManagerService
config *config.Config
lastUpdated int64
transfers []premiumizeme.Transfer
@@ -34,18 +33,48 @@ type TransferManagerService struct {
downloadsFolderID string
}
func NewTransferManagerService(pme *premiumizeme.Premiumizeme, arrs *[]arr.IArr, config *config.Config) TransferManagerService {
return TransferManagerService{
premiumizemeClient: pme,
arrs: arrs,
config: config,
lastUpdated: time.Now().Unix(),
transfers: make([]premiumizeme.Transfer, 0),
runningTask: false,
downloadListMutex: &sync.Mutex{},
downloadList: make(map[string]*DownloadDetails, 0),
status: "",
downloadsFolderID: "",
// Handle
func (t TransferManagerService) New() TransferManagerService {
t.premiumizemeClient = nil
t.arrsManager = nil
t.config = nil
t.lastUpdated = time.Now().Unix()
t.transfers = make([]premiumizeme.Transfer, 0)
t.runningTask = false
t.downloadListMutex = &sync.Mutex{}
t.downloadList = make(map[string]*DownloadDetails, 0)
t.status = ""
t.downloadsFolderID = ""
return t
}
func (t *TransferManagerService) Init(pme *premiumizeme.Premiumizeme, arrsManager *ArrsManagerService, config *config.Config) {
t.premiumizemeClient = pme
t.arrsManager = arrsManager
t.config = config
t.CleanUpUnzipDir()
}
func (t *TransferManagerService) CleanUpUnzipDir() {
log.Info("Cleaning unzip directory")
unzipBase, err := t.config.GetUnzipBaseLocation()
if err != nil {
log.Errorf("Error getting unzip base location: %s", err.Error())
return
}
err = utils.RemoveContents(unzipBase)
if err != nil {
log.Errorf("Error cleaning unzip directory: %s", err.Error())
return
}
}
func (manager *TransferManagerService) ConfigUpdatedCallback(currentConfig config.Config, newConfig config.Config) {
if currentConfig.UnzipDirectory != newConfig.UnzipDirectory {
manager.CleanUpUnzipDir()
}
}
@@ -54,7 +83,6 @@ func (manager *TransferManagerService) Run(interval time.Duration) {
for {
manager.runningTask = true
manager.TaskUpdateTransfersList()
//TODO: Seperate loop maybe
manager.TaskCheckPremiumizeDownloadsFolder()
manager.runningTask = false
manager.lastUpdated = time.Now().Unix()
@@ -77,14 +105,15 @@ func (manager *TransferManagerService) TaskUpdateTransfersList() {
log.Debug("Running Task UpdateTransfersList")
transfers, err := manager.premiumizemeClient.GetTransfers()
if err != nil {
log.Error(err)
log.Errorf("Error getting transfers: %s", err.Error())
return
}
manager.updateTransfers(transfers)
log.Tracef("Checking %d transfers against %d Arr clients", len(transfers), len(manager.arrsManager.GetArrs()))
for _, transfer := range transfers {
found := false
for _, arr := range *manager.arrs {
for _, arr := range manager.arrsManager.GetArrs() {
if found {
break
}
@@ -97,7 +126,7 @@ func (manager *TransferManagerService) TaskUpdateTransfersList() {
}
log.Tracef("Found %s in %s history", transfer.Name, arr.GetArrName())
found = true
log.Debugf("Processing transfer that has errored: ", transfer.Name)
log.Debugf("Processing transfer that has errored: %s", transfer.Name)
go arr.HandleErrorTransfer(&transfer, arrID, manager.premiumizemeClient)
}
@@ -107,15 +136,21 @@ func (manager *TransferManagerService) TaskUpdateTransfersList() {
func (manager *TransferManagerService) TaskCheckPremiumizeDownloadsFolder() {
log.Debug("Running Task CheckPremiumizeDownloadsFolder")
items, err := manager.premiumizemeClient.ListFolder(manager.downloadsFolderID)
if err != nil {
log.Error(err)
log.Errorf("Error listing downloads folder: %s", err.Error())
return
}
for _, item := range items {
log.Debugf("Processing completed item: %s", item.Name)
go manager.HandleFinishedItem(item, manager.config.DownloadsDirectory)
if manager.countDownloads() < manager.config.SimultaneousDownloads {
log.Debugf("Processing completed item: %s", item.Name)
manager.HandleFinishedItem(item, manager.config.DownloadsDirectory)
} else {
log.Debugf("Not processing any more transfers, %d are running and cap is %d", manager.countDownloads(), manager.config.SimultaneousDownloads)
break
}
}
}
@@ -134,6 +169,13 @@ func (manager *TransferManagerService) addDownload(item *premiumizeme.Item) {
}
}
func (manager *TransferManagerService) countDownloads() int {
manager.downloadListMutex.Lock()
defer manager.downloadListMutex.Unlock()
return len(manager.downloadList)
}
func (manager *TransferManagerService) removeDownload(name string) {
manager.downloadListMutex.Lock()
defer manager.downloadListMutex.Unlock()
@@ -154,7 +196,7 @@ func (manager *TransferManagerService) downloadExists(itemName string) bool {
return false
}
// Ran in a goroutine
// Returns when the download has been added to the list
func (manager *TransferManagerService) HandleFinishedItem(item premiumizeme.Item, downloadDirectory string) {
if manager.downloadExists(item.Name) {
log.Tracef("Transfer %s is already downloading", item.Name)
@@ -162,78 +204,78 @@ func (manager *TransferManagerService) HandleFinishedItem(item premiumizeme.Item
}
manager.addDownload(&item)
//Create entry in downloads map to lock item
// manager.downloadList[item.Name] = progress_downloader.NewWriteCounter()
log.Debug("Downloading: ", item.Name)
log.Tracef("%+v", item)
var link string
var err error
if item.Type == "file" {
link, err = manager.premiumizemeClient.GenerateZippedFileLink(item.ID)
} else if item.Type == "folder" {
link, err = manager.premiumizemeClient.GenerateZippedFolderLink(item.ID)
} else {
log.Errorf("Item is not of type 'file' or 'folder' !! Can't download %s", item.Name)
return
}
if err != nil {
log.Error(err)
go func() {
log.Debug("Downloading: ", item.Name)
log.Tracef("%+v", item)
var link string
var err error
if item.Type == "file" {
link, err = manager.premiumizemeClient.GenerateZippedFileLink(item.ID)
} else if item.Type == "folder" {
link, err = manager.premiumizemeClient.GenerateZippedFolderLink(item.ID)
} else {
log.Errorf("Item is not of type 'file' or 'folder' !! Can't download %s", item.Name)
return
}
if err != nil {
log.Errorf("Error generating download link: %s", err)
manager.removeDownload(item.Name)
return
}
log.Trace("Downloading from: ", link)
tempDir, err := manager.config.GetNewUnzipLocation()
if err != nil {
log.Errorf("Could not create temp dir: %s", err)
manager.removeDownload(item.Name)
return
}
splitString := strings.Split(link, "/")
savePath := path.Join(tempDir, splitString[len(splitString)-1])
log.Trace("Downloading to: ", savePath)
out, err := os.Create(savePath)
if err != nil {
log.Errorf("Could not create save path: %s", err)
manager.removeDownload(item.Name)
return
}
defer out.Close()
err = progress_downloader.DownloadFile(link, savePath, manager.downloadList[item.Name].ProgressDownloader)
if err != nil {
log.Errorf("Could not download file: %s", err)
manager.removeDownload(item.Name)
return
}
log.Tracef("Unzipping %s to %s", savePath, downloadDirectory)
err = utils.Unzip(savePath, downloadDirectory)
if err != nil {
log.Errorf("Could not unzip file: %s", err)
manager.removeDownload(item.Name)
return
}
log.Tracef("Removing zip %s from system", savePath)
err = os.RemoveAll(savePath)
if err != nil {
manager.removeDownload(item.Name)
log.Errorf("Could not remove zip: %s", err)
return
}
err = manager.premiumizemeClient.DeleteFolder(item.ID)
if err != nil {
manager.removeDownload(item.Name)
log.Errorf("Error deleting folder on premiumize.me: %s", err)
return
}
//Remove download entry from downloads map
manager.removeDownload(item.Name)
return
}
log.Trace("Downloading: ", link)
tempDir, err := utils.GetTempDir()
if err != nil {
log.Errorf("Could not create temp dir: %s", err)
manager.removeDownload(item.Name)
return
}
splitString := strings.Split(link, "/")
savePath := path.Join(tempDir, splitString[len(splitString)-1])
log.Trace("Downloading to: ", savePath)
out, err := os.Create(savePath)
if err != nil {
log.Errorf("Could not create save path: %s", err)
manager.removeDownload(item.Name)
return
}
defer out.Close()
err = progress_downloader.DownloadFile(link, savePath, manager.downloadList[item.Name].ProgressDownloader)
if err != nil {
log.Errorf("Could not download file: %s", err)
manager.removeDownload(item.Name)
return
}
log.Tracef("Unzipping %s to %s", savePath, downloadDirectory)
err = utils.Unzip(savePath, downloadDirectory)
if err != nil {
log.Errorf("Could not unzip file: %s", err)
manager.removeDownload(item.Name)
return
}
log.Tracef("Removing zip %s from system", savePath)
err = os.RemoveAll(savePath)
if err != nil {
manager.removeDownload(item.Name)
log.Errorf("Could not remove zip: %s", err)
return
}
err = manager.premiumizemeClient.DeleteFolder(item.ID)
if err != nil {
manager.removeDownload(item.Name)
log.Error(err)
return
}
//Remove download entry from downloads map
manager.removeDownload(item.Name)
}()
}


@@ -0,0 +1,144 @@
package service
import (
"bytes"
"fmt"
"html/template"
"net/http"
"os"
"path/filepath"
"strings"
"time"
"github.com/gorilla/mux"
"github.com/jackdallas/premiumizearr/internal/config"
log "github.com/sirupsen/logrus"
)
type IndexTemplates struct {
RootPath string
}
var indexBytes []byte
type WebServerService struct {
transferManager *TransferManagerService
directoryWatcherService *DirectoryWatcherService
arrsManagerService *ArrsManagerService
config *config.Config
srv *http.Server
}
func (s WebServerService) New() WebServerService {
s.config = nil
s.transferManager = nil
s.directoryWatcherService = nil
s.arrsManagerService = nil
s.srv = nil
return s
}
func (s *WebServerService) ConfigUpdatedCallback(currentConfig config.Config, newConfig config.Config) {
if currentConfig.BindIP != newConfig.BindIP ||
currentConfig.BindPort != newConfig.BindPort ||
currentConfig.WebRoot != newConfig.WebRoot {
log.Tracef("Config updated, restarting web server...")
s.srv.Close()
s.Start()
}
}
func (s *WebServerService) Init(transferManager *TransferManagerService, directoryWatcher *DirectoryWatcherService, arrManager *ArrsManagerService, config *config.Config) {
s.transferManager = transferManager
s.directoryWatcherService = directoryWatcher
s.arrsManagerService = arrManager
s.config = config
}
func (s *WebServerService) Start() {
log.Info("Starting web server...")
tmpl, err := template.ParseFiles("./static/index.html")
if err != nil {
log.Fatal(err)
}
var ibytes bytes.Buffer
err = tmpl.Execute(&ibytes, &IndexTemplates{s.config.WebRoot})
if err != nil {
log.Fatal(err)
}
indexBytes = ibytes.Bytes()
spa := spaHandler{
staticPath: "static",
indexPath: "index.html",
webRoot: s.config.WebRoot,
}
r := mux.NewRouter()
r.HandleFunc("/api/transfers", s.TransfersHandler)
r.HandleFunc("/api/downloads", s.DownloadsHandler)
r.HandleFunc("/api/blackhole", s.BlackholeHandler)
r.HandleFunc("/api/config", s.ConfigHandler)
r.HandleFunc("/api/testArr", s.TestArrHandler)
r.PathPrefix("/").Handler(spa)
address := fmt.Sprintf("%s:%s", s.config.BindIP, s.config.BindPort)
s.srv = &http.Server{
Handler: r,
Addr: address,
// Good practice: enforce timeouts for servers you create!
WriteTimeout: 15 * time.Second,
ReadTimeout: 15 * time.Second,
}
log.Infof("Web server started on %s", address)
go s.srv.ListenAndServe()
}
// Shamelessly stolen from mux examples https://github.com/gorilla/mux#examples
type spaHandler struct {
staticPath string
indexPath string
webRoot string
}
func (h spaHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
// get the absolute path to prevent directory traversal
path, err := filepath.Abs(r.URL.Path)
if err != nil {
// if we failed to get the absolute path respond with a 400 bad request
// and stop
http.Error(w, err.Error(), http.StatusBadRequest)
return
}
if h.webRoot != "" {
path = strings.Replace(path, h.webRoot, "", 1)
}
// prepend the path with the path to the static directory
path = filepath.Join(h.staticPath, path)
// check whether a file exists at the given path
_, err = os.Stat(path)
if os.IsNotExist(err) || strings.HasSuffix(path, h.staticPath) {
// file does not exist, serve index.html
// http.ServeFile(w, r, filepath.Join(h.staticPath, h.indexPath))
// file does not exist, serve index.html template
w.Write(indexBytes)
return
} else if err != nil {
// if we got an error (that wasn't that the file doesn't exist) stating the
// file, return a 500 internal server error and stop
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
r.URL.Path = strings.Replace(path, h.staticPath, "", -1)
// otherwise, use http.FileServer to serve the static dir
http.FileServer(http.Dir(h.staticPath)).ServeHTTP(w, r)
}
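The webRoot handling above boils down to one string rewrite before the file lookup. A standalone sketch of that rewrite, assuming a web root of premiumizearr and the static directory used above (the real values come from config):

```go
package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

func main() {
	// Assumed values: webRoot "premiumizearr", staticPath "static",
	// and an incoming request for /premiumizearr/bundle.js.
	reqPath := "/premiumizearr/bundle.js"

	p, err := filepath.Abs(reqPath) // already absolute, so this only cleans the path
	if err != nil {
		panic(err)
	}
	p = strings.Replace(p, "premiumizearr", "", 1) // strip the web root once
	p = filepath.Join("static", p)                 // prepend the static directory

	fmt.Println(p) // static/bundle.js
}
```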


@@ -0,0 +1,56 @@
package service
import (
"encoding/json"
"fmt"
"net/http"
"github.com/jackdallas/premiumizearr/internal/config"
)
type ConfigChangeResponse struct {
Succeeded bool `json:"succeeded"`
Status string `json:"status"`
}
func (s *WebServerService) ConfigHandler(w http.ResponseWriter, r *http.Request) {
switch r.Method {
case http.MethodGet:
data, err := json.Marshal(s.config)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
w.Write(data)
case http.MethodPost:
var newConfig config.Config
err := json.NewDecoder(r.Body).Decode(&newConfig)
if err != nil {
EncodeAndWriteConfigChangeResponse(w, &ConfigChangeResponse{
Succeeded: false,
Status: fmt.Sprintf("Config failed to update %s", err.Error()),
})
return
}
s.config.UpdateConfig(newConfig)
EncodeAndWriteConfigChangeResponse(w, &ConfigChangeResponse{
Succeeded: true,
Status: "Config updated",
})
default:
http.Error(w, "Method not allowed", http.StatusMethodNotAllowed)
}
}
func EncodeAndWriteConfigChangeResponse(w http.ResponseWriter, resp *ConfigChangeResponse) {
data, err := json.Marshal(resp)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
w.Write(data)
}
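ConfigHandler above serves the whole config document on GET and accepts a full replacement on POST, answering with ConfigChangeResponse. A client-side sketch of that round trip; the base URL reuses the dev-proxy address seen later in this change set, and the SimultaneousDownloads key is taken from the config page bindings — treat both as assumptions if your deployment differs:

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"log"
	"net/http"
)

// Mirrors the JSON shape ConfigHandler writes for POST requests.
type configChangeResponse struct {
	Succeeded bool   `json:"succeeded"`
	Status    string `json:"status"`
}

func main() {
	base := "http://localhost:8182" // assumption: dev-proxy address used elsewhere in this change set

	// GET the current config as a generic JSON document.
	res, err := http.Get(base + "/api/config")
	if err != nil {
		log.Fatal(err)
	}
	var cfg map[string]interface{}
	if err := json.NewDecoder(res.Body).Decode(&cfg); err != nil {
		log.Fatal(err)
	}
	res.Body.Close()

	// Change one field and POST the whole document back.
	cfg["SimultaneousDownloads"] = 5
	body, _ := json.Marshal(cfg)
	res, err = http.Post(base+"/api/config", "application/json", bytes.NewReader(body))
	if err != nil {
		log.Fatal(err)
	}
	defer res.Body.Close()

	var out configChangeResponse
	if err := json.NewDecoder(res.Body).Decode(&out); err != nil {
		log.Fatal(err)
	}
	fmt.Println(out.Succeeded, out.Status)
}
```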


@@ -0,0 +1,132 @@
package service
import (
"encoding/json"
"net/http"
"path"
"github.com/jackdallas/premiumizearr/internal/config"
"github.com/jackdallas/premiumizearr/pkg/premiumizeme"
)
type TransfersResponse struct {
Transfers []premiumizeme.Transfer `json:"data"`
Status string `json:"status"`
}
func (s *WebServerService) TransfersHandler(w http.ResponseWriter, r *http.Request) {
var resp TransfersResponse
resp.Transfers = *s.transferManager.GetTransfers()
resp.Status = s.transferManager.GetStatus()
data, err := json.Marshal(resp)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
w.Write(data)
}
type BlackholeFile struct {
ID int `json:"id"`
Name string `json:"name"`
}
type BlackholeResponse struct {
BlackholeFiles []BlackholeFile `json:"data"`
Status string `json:"status"`
}
type Download struct {
Added int64 `json:"added"`
Name string `json:"name"`
Progress string `json:"progress"`
Speed string `json:"speed"`
}
type DownloadsResponse struct {
Downloads []Download `json:"data"`
Status string `json:"status"`
}
func (s *WebServerService) DownloadsHandler(w http.ResponseWriter, r *http.Request) {
var resp DownloadsResponse
if s.transferManager == nil {
resp.Status = "Not Initialized"
} else {
for _, v := range s.transferManager.GetDownloads() {
resp.Downloads = append(resp.Downloads, Download{
Added: v.Added.Unix(),
Name: v.Name,
Progress: v.ProgressDownloader.GetProgress(),
Speed: v.ProgressDownloader.GetSpeed(),
})
}
resp.Status = ""
}
data, err := json.Marshal(resp)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
w.Write(data)
}
func (s *WebServerService) BlackholeHandler(w http.ResponseWriter, r *http.Request) {
var resp BlackholeResponse
if s.directoryWatcherService == nil {
resp.Status = "Not Initialized"
} else {
for i, n := range s.directoryWatcherService.Queue.GetQueue() {
name := path.Base(n)
resp.BlackholeFiles = append(resp.BlackholeFiles, BlackholeFile{
ID: i,
Name: name,
})
}
resp.Status = s.directoryWatcherService.GetStatus()
}
data, err := json.Marshal(resp)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
w.Write(data)
}
type TestArrResponse struct {
Status string `json:"status"`
Succeeded bool `json:"succeeded"`
}
func (s *WebServerService) TestArrHandler(w http.ResponseWriter, r *http.Request) {
var arr config.ArrConfig
err := json.NewDecoder(r.Body).Decode(&arr)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
err = TestArrConnection(arr)
var resp TestArrResponse
if err != nil {
resp.Status = err.Error()
resp.Succeeded = false
} else {
resp.Succeeded = true
}
data, err := json.Marshal(resp)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
w.Write(data)
}


@@ -4,9 +4,7 @@ import (
"archive/zip"
"fmt"
"io"
"io/ioutil"
"os"
"path"
"path/filepath"
"strings"
@@ -23,21 +21,6 @@ func StripDownloadTypesExtention(fileName string) string {
return fileName
}
func GetTempBaseDir() string {
return path.Join(os.TempDir(), "premiumizearrd")
}
func GetTempDir() (string, error) {
// Create temp dir in os temp location
tempDir := GetTempBaseDir()
err := os.Mkdir(tempDir, os.ModePerm)
dir, err := ioutil.TempDir(tempDir, "unzip-")
if err != nil {
return "", err
}
return dir, nil
}
// https://golangcode.com/unzip-files-in-go/
func Unzip(src string, dest string) error {
r, err := zip.OpenReader(src)
@@ -104,7 +87,8 @@ func GetDownloadsFolderIDFromPremiumizeme(premiumizemeClient *premiumizeme.Premi
folders, err := premiumizemeClient.GetFolders()
if err != nil {
log.Errorf("Error getting folders: %s", err)
log.Fatalf("Cannot read folders from premiumize.me, exiting!")
log.Errorf("Cannot read folders from premiumize.me, application will not run!")
return ""
}
const folderName = "arrDownloads"
@@ -119,10 +103,71 @@ func GetDownloadsFolderIDFromPremiumizeme(premiumizemeClient *premiumizeme.Premi
if len(downloadsFolderID) == 0 {
id, err := premiumizemeClient.CreateFolder(folderName)
if err != nil {
log.Fatalf("Cannot create downloads folder on premiumize.me, exiting! %+v", err)
log.Errorf("Cannot create downloads folder on premiumize.me, application will not run correctly! %+v", err)
}
downloadsFolderID = id
}
return downloadsFolderID
}
func EnvOrDefault(envName string, defaultValue string) string {
envValue := os.Getenv(envName)
if len(envValue) == 0 {
return defaultValue
}
return envValue
}
func IsRunningInDockerContainer() bool {
// docker creates a .dockerenv file at the root
// of the directory tree inside the container.
// if this file exists then the process is running
// inside a container, so return true
if _, err := os.Stat("/.dockerenv"); err == nil {
return true
}
return false
}
func IsDirectoryWriteable(path string) bool {
if _, err := os.Stat(path); os.IsNotExist(err) {
log.Errorf("Directory does not exist: %s", path)
return false
}
f, err := os.Create(path + "/test.txt")
if err != nil {
log.Errorf("Cannot write test.txt to directory: %s", path)
return false
}
f.Close()
// Delete test file
if err := os.Remove(path + "/test.txt"); err != nil {
log.Errorf("Cannot delete test.txt file in: %s", path)
return false
}
return true
}
//https://stackoverflow.com/questions/33450980/how-to-remove-all-contents-of-a-directory-using-golang
func RemoveContents(dir string) error {
d, err := os.Open(dir)
if err != nil {
return err
}
defer d.Close()
names, err := d.Readdirnames(-1)
if err != nil {
return err
}
for _, name := range names {
err = os.RemoveAll(filepath.Join(dir, name))
if err != nil {
return err
}
}
return nil
}
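RemoveContents empties a directory without deleting the directory itself, which is what lets CleanUpUnzipDir run at startup without recreating the unzip dir. A sketch of a test that could sit next to this helper, assuming the file lives in a utils package:

```go
package utils

import (
	"os"
	"path/filepath"
	"testing"
)

// Sketch: the directory passed to RemoveContents must survive,
// only its children are deleted.
func TestRemoveContentsKeepsDir(t *testing.T) {
	dir := t.TempDir()
	if err := os.WriteFile(filepath.Join(dir, "leftover.zip"), []byte("x"), 0644); err != nil {
		t.Fatal(err)
	}
	if err := RemoveContents(dir); err != nil {
		t.Fatal(err)
	}
	entries, err := os.ReadDir(dir)
	if err != nil {
		t.Fatalf("directory should still exist: %v", err)
	}
	if len(entries) != 0 {
		t.Fatalf("expected empty directory, found %d entries", len(entries))
	}
}
```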


@@ -1,201 +0,0 @@
package web_service
import (
"bytes"
"encoding/json"
"fmt"
"html/template"
"net/http"
"os"
"path"
"path/filepath"
"strings"
"time"
"github.com/gorilla/mux"
"github.com/jackdallas/premiumizearr/internal/config"
"github.com/jackdallas/premiumizearr/internal/service"
"github.com/jackdallas/premiumizearr/pkg/premiumizeme"
log "github.com/sirupsen/logrus"
)
type IndexTemplates struct {
RootPath string
}
var indexBytes []byte
const webRoot = "premiumizearr"
type server struct {
transferManager *service.TransferManagerService
directoryWatcherService *service.DirectoryWatcherService
}
// http Router
func StartWebServer(transferManager *service.TransferManagerService, directoryWatcher *service.DirectoryWatcherService, config *config.Config) {
tmpl, err := template.ParseFiles("./static/index.html")
if err != nil {
log.Fatal(err)
}
var ibytes bytes.Buffer
err = tmpl.Execute(&ibytes, &IndexTemplates{webRoot})
if err != nil {
log.Fatal(err)
}
indexBytes = ibytes.Bytes()
s := server{
transferManager: transferManager,
directoryWatcherService: directoryWatcher,
}
spa := spaHandler{
staticPath: "static",
indexPath: "index.html",
}
r := mux.NewRouter()
log.Infof("Creating route: %s", webRoot+"/api/transfers")
r.HandleFunc("/"+webRoot+"/api/transfers", s.TransfersHandler)
log.Infof("Creating route: %s", webRoot+"/api/downloads")
r.HandleFunc("/"+webRoot+"/api/downloads", s.DownloadsHandler)
log.Infof("Creating route: %s", webRoot+"/api/blackhole")
r.HandleFunc("/"+webRoot+"/api/blackhole", s.BlackholeHandler)
r.PathPrefix("/").Handler(spa)
srv := &http.Server{
Handler: r,
Addr: fmt.Sprintf("%s:%s", config.BindIP, config.BindPort),
// Good practice: enforce timeouts for servers you create!
WriteTimeout: 15 * time.Second,
ReadTimeout: 15 * time.Second,
}
srv.ListenAndServe()
}
type TransfersResponse struct {
Transfers []premiumizeme.Transfer `json:"data"`
Status string `json:"status"`
}
func (s *server) TransfersHandler(w http.ResponseWriter, r *http.Request) {
var resp TransfersResponse
resp.Transfers = *s.transferManager.GetTransfers()
resp.Status = s.transferManager.GetStatus()
data, err := json.Marshal(resp)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
w.Write(data)
}
type BlackholeFile struct {
ID int `json:"id"`
Name string `json:"name"`
}
type BlackholeResponse struct {
BlackholeFiles []BlackholeFile `json:"data"`
Status string `json:"status"`
}
type Download struct {
Added int64 `json:"added"`
Name string `json:"name"`
Progress string `json:"progress"`
Speed string `json:"speed"`
}
type DownloadsResponse struct {
Downloads []Download `json:"data"`
Status string `json:"status"`
}
func (s *server) DownloadsHandler(w http.ResponseWriter, r *http.Request) {
var resp DownloadsResponse
for _, v := range s.transferManager.GetDownloads() {
resp.Downloads = append(resp.Downloads, Download{
Added: v.Added.Unix(),
Name: v.Name,
Progress: v.ProgressDownloader.GetProgress(),
Speed: v.ProgressDownloader.GetSpeed(),
})
}
resp.Status = ""
data, err := json.Marshal(resp)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
w.Write(data)
}
func (s *server) BlackholeHandler(w http.ResponseWriter, r *http.Request) {
var resp BlackholeResponse
for i, n := range s.directoryWatcherService.Queue.GetQueue() {
name := path.Base(n)
resp.BlackholeFiles = append(resp.BlackholeFiles, BlackholeFile{
ID: i,
Name: name,
})
}
resp.Status = s.directoryWatcherService.GetStatus()
data, err := json.Marshal(resp)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
w.Write(data)
}
// Shamelessly stolen from mux examples https://github.com/gorilla/mux#examples
type spaHandler struct {
staticPath string
indexPath string
}
func (h spaHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
// get the absolute path to prevent directory traversal
path, err := filepath.Abs(r.URL.Path)
if err != nil {
// if we failed to get the absolute path respond with a 400 bad request
// and stop
http.Error(w, err.Error(), http.StatusBadRequest)
return
}
path = strings.Replace(path, webRoot, "", 1)
// prepend the path with the path to the static directory
path = filepath.Join(h.staticPath, path)
// check whether a file exists at the given path
_, err = os.Stat(path)
if os.IsNotExist(err) || strings.HasSuffix(path, h.staticPath) {
// file does not exist, serve index.html
// http.ServeFile(w, r, filepath.Join(h.staticPath, h.indexPath))
// file does not exist, serve index.html template
w.Write(indexBytes)
return
} else if err != nil {
// if we got an error (that wasn't that the file doesn't exist) stating the
// file, return a 500 internal server error and stop
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
r.URL.Path = strings.Replace(path, h.staticPath, "", -1)
// otherwise, use http.FileServer to serve the static dir
http.FileServer(http.Dir(h.staticPath)).ServeHTTP(w, r)
}


@@ -22,8 +22,8 @@ type Premiumizeme struct {
APIKey string
}
func NewPremiumizemeClient(APIKey string) *Premiumizeme {
return &Premiumizeme{APIKey: APIKey}
func NewPremiumizemeClient(APIKey string) Premiumizeme {
return Premiumizeme{APIKey: APIKey}
}
func (pm *Premiumizeme) createPremiumizemeURL(urlPath string) (url.URL, error) {
@@ -38,7 +38,15 @@ func (pm *Premiumizeme) createPremiumizemeURL(urlPath string) (url.URL, error) {
return *u, nil
}
var (
ErrAPIKeyNotSet = fmt.Errorf("premiumize.me API key not set")
)
func (pm *Premiumizeme) GetTransfers() ([]Transfer, error) {
if pm.APIKey == "" {
return nil, ErrAPIKeyNotSet
}
log.Trace("Getting transfers list from premiumize.me")
url, err := pm.createPremiumizemeURL("/transfer/list")
if err != nil {
@@ -70,6 +78,10 @@ func (pm *Premiumizeme) GetTransfers() ([]Transfer, error) {
}
func (pm *Premiumizeme) ListFolder(folderID string) ([]Item, error) {
if pm.APIKey == "" {
return nil, ErrAPIKeyNotSet
}
var ret []Item
url, err := pm.createPremiumizemeURL("/folder/list")
if err != nil {
@@ -112,6 +124,10 @@ func (pm *Premiumizeme) ListFolder(folderID string) ([]Item, error) {
}
func (pm *Premiumizeme) GetFolders() ([]Item, error) {
if pm.APIKey == "" {
return nil, ErrAPIKeyNotSet
}
log.Trace("Getting folder list from premiumize.me")
url, err := pm.createPremiumizemeURL("/folder/list")
if err != nil {
@@ -143,6 +159,10 @@ func (pm *Premiumizeme) GetFolders() ([]Item, error) {
}
func (pm *Premiumizeme) CreateTransfer(filePath string, parentID string) error {
if pm.APIKey == "" {
return ErrAPIKeyNotSet
}
//TODO: handle file size, i.e. incorrect file being saved
log.Trace("Opening file: ", filePath)
file, err := os.Open(filePath)
@@ -203,6 +223,10 @@ func (pm *Premiumizeme) CreateTransfer(filePath string, parentID string) error {
}
func (pm *Premiumizeme) DeleteFolder(folderID string) error {
if pm.APIKey == "" {
return ErrAPIKeyNotSet
}
url, err := pm.createPremiumizemeURL("/folder/delete")
if err != nil {
return err
@@ -246,6 +270,10 @@ func (pm *Premiumizeme) DeleteFolder(folderID string) error {
}
func (pm *Premiumizeme) CreateFolder(folderName string) (string, error) {
if pm.APIKey == "" {
return "", ErrAPIKeyNotSet
}
url, err := pm.createPremiumizemeURL("/folder/create")
if err != nil {
return "", err
@@ -289,6 +317,10 @@ func (pm *Premiumizeme) CreateFolder(folderName string) (string, error) {
}
func (pm *Premiumizeme) DeleteTransfer(id string) error {
if pm.APIKey == "" {
return ErrAPIKeyNotSet
}
url, err := pm.createPremiumizemeURL("/transfer/delete")
if err != nil {
return err
@@ -437,6 +469,10 @@ func (pm *Premiumizeme) GenerateZippedFolderLink(fileID string) (string, error)
}
func (pm *Premiumizeme) generateZip(ID string, srcType SRCType) (string, error) {
if pm.APIKey == "" {
return "", ErrAPIKeyNotSet
}
// Build URL with apikey
URL, err := pm.createPremiumizemeURL("/zip/generate")
if err != nil {
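
All of the API-key guards return the same ErrAPIKeyNotSet sentinel, so callers can tell "key not configured yet" apart from a real API failure. A standalone sketch of that check; it assumes the public pkg/premiumizeme import path shown in this change set, and the polling wrapper itself is hypothetical:

```go
package main

import (
	"errors"
	"log"

	"github.com/jackdallas/premiumizearr/pkg/premiumizeme"
)

// Sketch only: ErrAPIKeyNotSet and GetTransfers come from the diff above;
// the surrounding wiring is invented for illustration.
func pollTransfers(client *premiumizeme.Premiumizeme) {
	transfers, err := client.GetTransfers()
	if errors.Is(err, premiumizeme.ErrAPIKeyNotSet) {
		log.Println("premiumize.me API key not configured yet; skipping this poll")
		return
	}
	if err != nil {
		log.Printf("error getting transfers: %v", err)
		return
	}
	log.Printf("got %d transfers", len(transfers))
}

func main() {
	client := premiumizeme.NewPremiumizemeClient("") // empty key triggers the sentinel
	pollTransfers(&client)
}
```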

scripts/postinstall.sh Normal file

@@ -0,0 +1,6 @@
#!/bin/bash
chown -R 1000:1000 /opt/premiumizearrd/
systemctl enable premiumizearrd.service
systemctl daemon-reload
systemctl start premiumizearrd.service

web/.node-version Normal file

@@ -0,0 +1 @@
v16

web/package-lock.json generated

File diff suppressed because it is too large

@@ -2,25 +2,25 @@
"name": "premiumizearr-ui",
"version": "0.0.1",
"devDependencies": {
"carbon-components-svelte": "^0.49.0",
"carbon-icons-svelte": "^10.38.0",
"carbon-preprocess-svelte": "^0.6.0",
"copy-webpack-plugin": "^9.1.0",
"cross-env": "^7.0.3",
"css-loader": "^5.0.1",
"esbuild-loader": "^2.16.0",
"mini-css-extract-plugin": "^1.3.4",
"svelte": "^3.31.2",
"carbon-components-svelte": "^0.64.0",
"carbon-icons-svelte": "^11.0.0",
"carbon-preprocess-svelte": "^0.9.0",
"copy-webpack-plugin": "^9.0.0",
"cross-env": "^7.0.0",
"css-loader": "^5.0.0",
"esbuild-loader": "^2.0.0",
"mini-css-extract-plugin": "^1.0.0",
"svelte": "^3.49.0",
"svelte-loader": "^3.0.0",
"webpack": "^5.16.0",
"webpack-cli": "^4.4.0",
"webpack-dev-server": "^4.7.3"
"webpack": "^5.0.0",
"webpack-cli": "^4.0.0",
"webpack-dev-server": "^4.0.0"
},
"scripts": {
"build": "cross-env NODE_ENV=production webpack",
"dev": "webpack serve --content-base public"
"dev": "webpack serve --static public"
},
"dependencies": {
"luxon": "^2.3.0"
"luxon": "^2.0.0"
}
}


@@ -6,13 +6,13 @@
<title>Premiumizearr</title>
<link rel='icon' type='image/png' href='/favicon.png'>
<link rel='stylesheet' href='/{{.RootPath}}/bundle.css'>
<link rel='icon' type='image/png' href='./{{.RootPath}}/favicon.png'>
<link rel='stylesheet' href='./{{.RootPath}}/bundle.css'>
<!-- Material Icons -->
<link rel="stylesheet" href="https://fonts.googleapis.com/icon?family=Material+Icons" />
<!-- Roboto -->
<link rel="stylesheet" href="https://fonts.googleapis.com/css?family=Roboto:300,400,500,600,700" />
<script defer src='/{{.RootPath}}/bundle.js'></script>
<script defer src='./{{.RootPath}}/bundle.js'></script>
</head>
<body>


@@ -1,150 +1,31 @@
<script>
import APITable from "./components/APITable.svelte";
import "carbon-components-svelte/css/g100.css";
import { Grid, Row, Column } from "carbon-components-svelte";
import DateTime from "luxon";
let dlSpeed = 0;
let webRoot = new URL(window.location.href).pathname;
function parseDLSpeedFromMessage(m) {
if (m == "Loading..." || m == undefined) return 0;
let speed = m.split(" ")[0];
speed = speed.replace(",", "");
let unit = m.split(" ")[1];
if (Number.isNaN(speed)) {
console.log("Speed is not a number: ", speed);
console.log("Message: ", message);
return 0;
}
if (unit === undefined || unit === null || unit == "") {
console.log("Unit undefined in : " + m);
return 0;
} else {
try {
unit = unit.toUpperCase();
} catch (error) {
return 0;
}
unit = unit.replace("/", "");
unit = unit.substring(0, 2);
switch (unit) {
case "KB":
return speed * 1024;
case "MB":
return speed * 1024 * 1024;
case "GB":
return speed * 1024 * 1024 * 1024;
default:
console.log("Unknown unit: " + unit);
return 0;
}
}
}
function HumanReadableSpeed(bytes) {
if (bytes < 1024) {
return bytes + " B/s";
} else if (bytes < 1024 * 1024) {
return (bytes / 1024).toFixed(2) + " KB/s";
} else if (bytes < 1024 * 1024 * 1024) {
return (bytes / 1024 / 1024).toFixed(2) + " MB/s";
} else {
return (bytes / 1024 / 1024 / 1024).toFixed(2) + " GB/s";
}
}
function dataToRows(data) {
let rows = [];
dlSpeed = 0;
if (!data) return rows;
for (let i = 0; i < data.length; i++) {
let d = data[i];
rows.push({
id: d.id,
name: d.name,
status: d.status,
progress: (d.progress * 100).toFixed(0) + "%",
message: d.message,
});
let speed = parseDLSpeedFromMessage(d.message);
if (!Number.isNaN(speed)) {
dlSpeed += speed;
} else {
console.error("Invalid speed: " + d.message);
}
}
return rows;
}
function downloadsToRows(downloads) {
let rows = [];
if (!downloads) return rows;
for (let i = 0; i < downloads.length; i++) {
let d = downloads[i];
rows.push({
Added: DateTime.fromMillis(d.added).toFormat('dd hh:mm:ss a'),
name: d.name,
progress: (d.progress * 100).toFixed(0) + "%",
});
}
}
</script>
<main>
<Grid fullWidth>
<Row>
<Column md={4} >
<h3>Blackhole</h3>
<APITable
headers={[
{ key: "id", value: "Pos" },
{ key: "name", value: "Name", sort: false },
]}
{webRoot}
APIpath="/api/blackhole"
zebra={true}
totalName="In Queue: "
/>
</Column>
<Column md={4} >
<h3>Downloads</h3>
<APITable
headers={[
{ key: "added", value: "Added" },
{ key: "name", value: "Name" },
{ key: "progress", value: "Progress" },
{ key: "speed", value: "Speed" },
]}
updateTimeSeconds={2}
{webRoot}
APIpath="/api/downloads"
zebra={true}
totalName="Downloading: "
/>
</Column>
</Row>
<Row>
<Column>
<h3>Transfers</h3>
<p>Download Speed: {HumanReadableSpeed(dlSpeed)}</p>
<APITable
headers={[
{ key: "name", value: "Name" },
{ key: "status", value: "Status" },
{ key: "progress", value: "Progress" },
{ key: "message", value: "Message", sort: false },
]}
{webRoot}
APIpath="/api/transfers"
zebra={true}
{dataToRows}
/>
</Column>
</Row>
</Grid>
</main>
<script>
import "carbon-components-svelte/css/g100.css";
import {
Grid,
Row,
Column,
Tabs,
Tab,
TabContent,
} from "carbon-components-svelte";
import Config from "./pages/Config.svelte";
import Info from "./pages/Info.svelte";
</script>
<main>
<Grid fullWidth>
<Row>
<Column>
<Tabs>
<Tab label="Info" />
<Tab label="Config" />
<svelte:fragment slot="content">
<TabContent><Info /></TabContent>
<TabContent><Config /></TabContent>
</svelte:fragment>
</Tabs>
</Column>
</Row>
</Grid>
</main>


@@ -0,0 +1,21 @@
export function CalculateAPIPath(path) {
let webRoot = window.location.href;
if (webRoot.indexOf("index.html") > -1) {
webRoot = webRoot.substring(0, webRoot.indexOf("index.html"));
}
if (webRoot[webRoot.length - 1] !== "/") {
webRoot += "/";
}
if (path[0] == "/") {
// console.log(webRoot + path.substring(1));
return webRoot + path.substring(1);
}
// console.log(webRoot + path);
return webRoot + path;
}


@@ -1,14 +1,13 @@
<script>
import { DataTable, InlineLoading } from "carbon-components-svelte";
import { CalculateAPIPath } from "../Utilities/web_root";
export let totalName = "";
export let headers = {};
export let webRoot = "";
export let updateTimeSeconds = 10;
export let APIpath = "/api/transfers";
export let dataToRows = function (data) {
if (!data)
return [];
if (!data) return [];
return data;
};
@@ -21,7 +20,7 @@
if (updating) return;
// Refresh from endpoint
updating = true;
fetch(webRoot + APIpath)
fetch(CalculateAPIPath(APIpath))
.then((res) => res.json())
.then((data) => {
rows = dataToRows(data.data);
@@ -46,16 +45,17 @@
<main>
{#if totalName !== ""}
<p>
{totalName} {safeLength(rows)}
{totalName}
{safeLength(rows)}
</p>
{/if}
<p>
<InlineLoading status={statusIndicator} description="Update status" />
<InlineLoading status={statusIndicator} description="Update status" />
</p>
<p>
Message: {status}
Message: {status}
</p>
<p>
<DataTable sortable {headers} {rows} />
<DataTable sortable {headers} {rows} />
</p>
</main>

web/src/pages/Config.svelte Normal file

@@ -0,0 +1,351 @@
<script>
import {
Row,
Column,
Button,
TextInput,
Modal,
FormGroup,
Dropdown,
Form,
Checkbox,
} from "carbon-components-svelte";
import {
Save,
CheckmarkFilled,
AddFilled,
TrashCan,
HelpFilled,
MisuseOutline,
WatsonHealthRotate_360,
} from "carbon-icons-svelte";
import { CalculateAPIPath } from "../Utilities/web_root";
let config = {
BlackholeDirectory: "",
PollBlackholeDirectory: false,
PollBlackholeIntervalMinutes: 10,
DownloadsDirectory: "",
UnzipDirectory: "",
BindIP: "",
BindPort: "",
WebRoot: "",
SimultaneousDownloads: 0,
Arrs: [],
};
const ERR_SAVE = "Error Saving Config";
const ERR_TEST = "Error Testing *arr client";
let arrTesting = [];
let arrTestIcons = [];
let arrTestKind = [];
let inputDisabled = true;
let errorModal = false;
let errorTitle = ERR_SAVE;
let errorMessage = "";
let saveIcon = Save;
function getConfig() {
inputDisabled = true;
fetch(CalculateAPIPath("api/config"))
.then((response) => response.json())
.then((data) => {
if (Array.isArray(data.Arrs)) {
for (let i = 0; i < data.Arrs.length; i++) {
SetTestArr(i, HelpFilled, "secondary", false);
}
}
config = data;
inputDisabled = false;
})
.catch((error) => {
console.error("Error: ", error);
});
}
function submit() {
inputDisabled = true;
fetch(CalculateAPIPath("api/config"), {
method: "POST",
headers: {
"Content-Type": "application/json",
},
body: JSON.stringify(config),
})
.then((response) => response.json())
.then((data) => {
if (data.succeeded) {
saveIcon = CheckmarkFilled;
getConfig();
setTimeout(() => {
saveIcon = Save;
}, 1000);
} else {
errorMessage = data.status;
errorTitle = ERR_SAVE;
errorModal = true;
getConfig();
}
})
.catch((error) => {
console.error("Error: ", error);
errorTitle = ERR_SAVE;
errorMessage = error;
errorModal = true;
setTimeout(() => {
getConfig();
}, 1500);
});
}
function AddArr() {
config.Arrs.push({
Name: "New Arr",
URL: "http://localhost:1234",
APIKey: "xxxxxxxx",
Type: "Sonarr",
});
//Force re-paint
config.Arrs = [...config.Arrs];
}
function RemoveArr(index) {
config.Arrs.splice(index, 1);
//Force re-paint
config.Arrs = [...config.Arrs];
}
function TestArr(index) {
SetTestArr(index, WatsonHealthRotate_360, "secondary", true);
fetch(CalculateAPIPath("api/testArr"), {
method: "POST",
headers: {
"Content-Type": "application/json",
},
body: JSON.stringify(config.Arrs[index]),
})
.then((response) => response.json())
.then((data) => {
if (data.succeeded) {
SetTestArr(index, CheckmarkFilled, "primary", false);
ResetArrTestDelayed(index, 10);
} else {
SetTestArr(index, MisuseOutline, "danger", false);
ResetArrTestDelayed(index, 5);
errorTitle = ERR_TEST;
errorMessage = data.status;
errorModal = true;
}
})
.catch((error) => {
console.error("Error: ", error);
SetTestArr(index, MisuseOutline, "danger", false);
ResetArrTestDelayed(index, 5);
errorTitle = ERR_TEST;
errorMessage = error;
errorModal = true;
});
}
function UntestArr(index) {
SetTestArr(index, HelpFilled, "secondary", false);
}
function SetTestArr(index, icon, kind, testing) {
arrTesting[index] = testing;
arrTestIcons[index] = icon;
arrTestKind[index] = kind;
arrTesting = [...arrTesting];
arrTestIcons = [...arrTestIcons];
arrTestKind = [...arrTestKind];
}
function ResetArrTestDelayed(index, seconds) {
setTimeout(() => {
SetTestArr(index, HelpFilled, "secondary", false);
}, 1000 * seconds);
}
getConfig();
</script>
<main>
<Row>
<Column>
<h4>*Arr Settings</h4>
<FormGroup>
{#if config.Arrs !== undefined}
{#each config.Arrs as arr, i}
<h5>- {arr.Name ? arr.Name : i}</h5>
<FormGroup>
<TextInput
labelText="Name"
bind:value={arr.Name}
disabled={inputDisabled}
on:input={() => {
UntestArr(i);
}}
/>
<TextInput
labelText="URL"
bind:value={arr.URL}
disabled={inputDisabled}
on:input={() => {
UntestArr(i);
}}
/>
<TextInput
labelText="APIKey"
bind:value={arr.APIKey}
disabled={inputDisabled}
on:input={() => {
UntestArr(i);
}}
/>
<Dropdown
titleText="Type"
selectedId={arr.Type}
on:select={(e) => {
config.Arrs[i].Type = e.detail.selectedId;
UntestArr(i);
}}
items={[
{ id: "Sonarr", text: "Sonarr" },
{ id: "Radarr", text: "Radarr" },
]}
disabled={inputDisabled}
/>
<Button
style="margin-top: 10px;"
on:click={() => {
RemoveArr(i);
}}
kind="danger"
icon={TrashCan}
iconDescription="Delete Arr"
/>
<Button
style="margin-top: 10px;"
on:click={() => {
TestArr(i);
}}
disabled={arrTesting[i]}
kind={arrTestKind[i]}
icon={arrTestIcons[i]}
>
Test
</Button>
</FormGroup>
{/each}
{/if}
</FormGroup>
<Button on:click={AddArr} disabled={inputDisabled} icon={AddFilled}>
Add Arr
</Button>
</Column>
<Column>
<h4>Premiumize.me Settings</h4>
<FormGroup>
<TextInput
disabled={inputDisabled}
labelText="API Key"
bind:value={config.PremiumizemeAPIKey}
/>
</FormGroup>
<h4>Directory Settings</h4>
<FormGroup>
<TextInput
disabled={inputDisabled}
labelText="Blackhole Directory"
bind:value={config.BlackholeDirectory}
/>
<Checkbox
disabled={inputDisabled}
bind:checked={config.PollBlackholeDirectory}
labelText="Poll Blackhole Directory"
/>
<TextInput
type="number"
disabled={inputDisabled}
labelText="Poll Blackhole Interval Minutes"
bind:value={config.PollBlackholeIntervalMinutes}
/>
</FormGroup>
<FormGroup>
<TextInput
disabled={inputDisabled}
labelText="Download Directory"
bind:value={config.DownloadsDirectory}
/>
<TextInput
disabled={inputDisabled}
labelText="Unzip Directory"
bind:value={config.UnzipDirectory}
/>
</FormGroup>
<h4>Web Server Settings</h4>
<FormGroup>
<TextInput
disabled={inputDisabled}
labelText="Bind IP"
bind:value={config.BindIP}
/>
<TextInput
disabled={inputDisabled}
labelText="Bind Port"
bind:value={config.BindPort}
/>
<TextInput
disabled={inputDisabled}
labelText="Web Root"
bind:value={config.WebRoot}
/>
</FormGroup>
<h4>Download Settings</h4>
<FormGroup>
<TextInput
type="number"
disabled={inputDisabled}
labelText="Simultaneous Downloads"
bind:value={config.SimultaneousDownloads}
/>
</FormGroup>
<Button on:click={submit} icon={saveIcon} disabled={inputDisabled}
>Save</Button
>
</Column>
</Row>
</main>
<Modal
bind:open={errorModal}
on:open={errorModal}
passiveModal
modalHeading={errorTitle}
on:close={() => {
errorModal = false;
}}
>
<p>{errorMessage}</p>
</Modal>
<!--
{() => {
console.log(testStatus.get(i));
if (testStatus.get(i) == undefined)
return "secondary";
if (testStatus.get(i) === 3) {
return "danger";
} else {
return "secondary";
}
}}
-->

web/src/pages/Info.svelte Normal file

@@ -0,0 +1,144 @@
<script>
import APITable from "../components/APITable.svelte";
import { Row, Column } from "carbon-components-svelte";
import {DateTime} from "luxon";
let dlSpeed = 0;
function parseDLSpeedFromMessage(m) {
if (m == "Loading..." || m == undefined) return 0;
if (m == "too many missing articles") return 0;
let speed = m.split(" ")[0];
speed = speed.replace(",", "");
let unit = m.split(" ")[1];
if (isNaN(speed)) {
console.log("Speed is not a number: ", speed);
console.log("Message: ", m);
return 0;
}
if (unit === undefined || unit === null || unit == "") {
console.log("Unit undefined in : " + m);
return 0;
} else {
try {
unit = unit.toUpperCase();
} catch (error) {
return 0;
}
unit = unit.replace("/", "");
unit = unit.substring(0, 2);
switch (unit) {
case "KB":
return speed * 1024;
case "MB":
return speed * 1024 * 1024;
case "GB":
return speed * 1024 * 1024 * 1024;
default:
console.log("Unknown unit: " + unit + " in message '" + m + "'");
return 0;
}
}
}
function HumanReadableSpeed(bytes) {
if (bytes < 1024) {
return bytes + " B/s";
} else if (bytes < 1024 * 1024) {
return (bytes / 1024).toFixed(2) + " KB/s";
} else if (bytes < 1024 * 1024 * 1024) {
return (bytes / 1024 / 1024).toFixed(2) + " MB/s";
} else {
return (bytes / 1024 / 1024 / 1024).toFixed(2) + " GB/s";
}
}
function dataToRows(data) {
let rows = [];
dlSpeed = 0;
if (!data) return rows;
for (let i = 0; i < data.length; i++) {
let d = data[i];
rows.push({
id: d.id,
name: d.name,
status: d.status,
progress: (d.progress * 100).toFixed(0) + "%",
message: d.message,
});
let speed = parseDLSpeedFromMessage(d.message);
if (!Number.isNaN(speed)) {
dlSpeed += speed;
} else {
console.error("Invalid speed: " + d.message);
}
}
return rows;
}
function downloadsToRows(downloads) {
let rows = [];
if (!downloads) return rows;
for (let i = 0; i < downloads.length; i++) {
let d = downloads[i];
rows.push({
Added: DateTime.fromSeconds(d.added).toFormat('dd hh:mm:ss a'),
name: d.name,
progress: (d.progress * 100).toFixed(0) + "%",
});
}
return rows;
}
</script>
<main>
<Row>
<Column md={4} >
<h3>Blackhole</h3>
<APITable
headers={[
{ key: "id", value: "Pos" },
{ key: "name", value: "Name", sort: false },
]}
APIpath="api/blackhole"
zebra={true}
totalName="In Queue: "
/>
</Column>
<Column md={4} >
<h3>Downloads</h3>
<APITable
headers={[
{ key: "added", value: "Added" },
{ key: "name", value: "Name" },
{ key: "progress", value: "Progress" },
{ key: "speed", value: "Speed" },
]}
updateTimeSeconds={2}
APIpath="api/downloads"
zebra={true}
totalName="Downloading: "
/>
</Column>
</Row>
<Row>
<Column>
<h3>Transfers</h3>
<p>Download Speed: {HumanReadableSpeed(dlSpeed)}</p>
<APITable
headers={[
{ key: "name", value: "Name" },
{ key: "status", value: "Status" },
{ key: "progress", value: "Progress" },
{ key: "message", value: "Message", sort: false },
]}
APIpath="api/transfers"
zebra={true}
{dataToRows}
/>
</Column>
</Row>
</main>


@@ -61,7 +61,7 @@ module.exports = {
devServer: {
hot: true,
proxy: {
'/api': 'https://projectmouseion.com/premiumizearr/api'
'/api': 'http://localhost:8182'
}
},
optimization: {