2 Commits

SHA1 Message Date
c32fdde780 alacritty-ligatures: fix cargoDeps output hash 2023-06-20 16:23:09 +10:00
b1195b3d50 ci: check flake in build job 2023-06-20 16:22:48 +10:00
47 changed files with 1233 additions and 10835 deletions


@ -8,17 +8,14 @@ jobs:
strategy:
matrix:
check:
# - nixpkgs-fmt
# - deadnix
- nixpkgs-fmt
- deadnix
- nur
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: https://gitea.com/actions/checkout@v3
- name: Install nix
uses: https://github.com/cachix/install-nix-action@v23
with:
github_access_token: ${{ secrets.INPUT_GITHUB_ACCESS_TOKEN }}
uses: actions/checkout@v3
- uses: cachix/install-nix-action@v22
- name: Check ${{ matrix.check }}
# Depends on nixos/nix#7759 before this can be simplified to plain `nix flake check`
run: nix run .#checks.$(nix eval --raw --impure --expr "builtins.currentSystem").${{ matrix.check }}
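(Each matrix leg runs one flake check directly with `nix run`; this works because every check is itself a runnable derivation. On an x86_64-linux runner, for example, the deadnix leg targets the flake output sketched below — the same writeShellScriptBin wrapper enabled in flake.nix further down; the system string is only an example.)

checks.x86_64-linux.deadnix = pkgs.writeShellScriptBin "deadnix-check" ''
  ${pkgs.deadnix}/bin/deadnix --fail .
'';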
@ -46,17 +43,19 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: https://gitea.com/actions/checkout@v3
uses: actions/checkout@v3
- name: Install nix
uses: https://github.com/cachix/install-nix-action@v23
uses: cachix/install-nix-action@v22
- name: Show nixpkgs version
run: nix eval --impure --expr '(import ./flake-compat.nix { src = ./.; }).lib.version'
- name: Setup cachix
uses: https://github.com/cachix/cachix-action@v12
uses: cachix/cachix-action@v12
if: ${{ matrix.cachixName != '<YOUR_CACHIX_NAME>' }}
with:
name: ${{ matrix.cachixName }}
signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}'
- name: Check nix
run: nix flake check --show-trace
- name: Build nix packages
run: nix develop .#ci -c nix-build-uncached ci.nix -A cacheOutputs
- name: Trigger NUR update


@ -11,13 +11,13 @@ jobs:
fail-fast: false
steps:
- name: Checkout repository
uses: actions/checkout@v4
- uses: cachix/install-nix-action@v23
uses: actions/checkout@v3
- uses: cachix/install-nix-action@v22
with:
extra_nix_config: |
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
- name: "flake: update inputs"
uses: DeterminateSystems/update-flake-lock@v20
uses: DeterminateSystems/update-flake-lock@v19
with:
pr-title: "flake: update inputs"
pr-labels: dependencies

callUnitRoot.nix (new file, 15 additions)

@ -0,0 +1,15 @@
{ pkgs
, lib ? pkgs.lib
, unitDir ? "unit"
, packageFun ? "package.nix"
, root ? "${./pkgs}/${unitDir}"
}:
let
shards = lib.attrNames (builtins.readDir root);
namesForShard = shard: lib.mapAttrs'
(name: _: { inherit name; value = root + "/${shard}/${name}"; })
(builtins.readDir (root + "/${shard}"));
namesToPath = lib.foldl' lib.recursiveUpdate { } (map namesForShard shards);
units = lib.mapAttrs (_: path: pkgs.callPackage (path + "/${packageFun}") { }) namesToPath;
in
units
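callUnitRoot.nix walks a two-level sharded tree (shard directory, then one directory per package, each containing a package.nix) and calls every package file with pkgs.callPackage. A minimal usage sketch — the shard and package names below are only illustrative:

let
  pkgs = import <nixpkgs> { };
  units = import ./callUnitRoot.nix { inherit pkgs; };
in
builtins.attrNames units
# e.g. [ "psst" "samrewritten" ] for a tree like
#   pkgs/unit/ps/psst/package.nix
#   pkgs/unit/sa/samrewritten/package.nix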


@ -8,10 +8,10 @@
{ pkgs ? import <nixpkgs> { } }:
let
system = pkgs.stdenv.hostPlatform.system;
packages = import ./pkgs/top-level { localSystem = system; inherit pkgs; };
legacy = import ./pkgs/top-level/all-packages.nix { inherit pkgs; };
units = import ./callUnitRoot.nix { inherit pkgs; root = ./pkgs/unit; };
in
packages // {
legacy // units // {
# The `lib`, `modules`, and `overlay` names are special
lib = import ./lib { inherit pkgs; }; # functions
modules = import ./modules; # NixOS modules
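Note the merge order: in `legacy // units`, a name defined in both sets resolves to the unit-style package, since `//` lets the right-hand operand win:

{ a = 1; b = 2; } // { b = 3; }   # => { a = 1; b = 3; }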

flake.lock (generated, 12 changed lines)

@ -5,11 +5,11 @@
"systems": "systems"
},
"locked": {
"lastModified": 1694529238,
"narHash": "sha256-zsNZZGTGnMOf9YpHKJqMSsa0dXbfmxeoJ7xHlrt+xmY=",
"lastModified": 1685518550,
"narHash": "sha256-o2d0KcvaXzTrPRIo0kOLV0/QXHhDQ5DTi+OxcjO8xqY=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "ff7b65b44d01cf9ba6a71320833626af21126384",
"rev": "a1720a10a6cfe8234c0e93907ffe81be440f4cef",
"type": "github"
},
"original": {
@ -20,11 +20,11 @@
},
"nixpkgs": {
"locked": {
"lastModified": 1699725108,
"narHash": "sha256-NTiPW4jRC+9puakU4Vi8WpFEirhp92kTOSThuZke+FA=",
"lastModified": 1687103638,
"narHash": "sha256-dwy/TK6Db5W7ivcgmcxUykhFwodIg0jrRzOFt7H5NUc=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "911ad1e67f458b6bcf0278fa85e33bb9924fed7e",
"rev": "91430887645a0953568da2f3e9a3a3bb0a0378ac",
"type": "github"
},
"original": {


@ -15,16 +15,24 @@
pkgs = import nixpkgs { inherit system; };
in
{
packages = import ./pkgs/top-level { localSystem = system; inherit pkgs; };
# nixos/rfcs#140
# Only produces the package set of the proposed functionality.
# Unstable names are variables.
packages =
let
legacyPackages = import ./pkgs/top-level/all-packages.nix { inherit pkgs; };
unitPackages = import ./callUnitRoot.nix { inherit pkgs; };
onlyAvailable = lib.filterAttrs (_: drv: builtins.elem system (drv.meta.platforms or [ ]));
in
onlyAvailable (legacyPackages // unitPackages);
checks = {
# FIXME: Disabled until I can work out what to do with generated code.
# nixpkgs-fmt = pkgs.writeShellScriptBin "nixpkgs-fmt-check" ''
# ${pkgs.nixpkgs-fmt}/bin/nixpkgs-fmt --check .
# '';
# deadnix = pkgs.writeShellScriptBin "deadnix-check" ''
# ${pkgs.deadnix}/bin/deadnix --fail .
# '';
nixpkgs-fmt = pkgs.writeShellScriptBin "nixpkgs-fmt-check" ''
${pkgs.nixpkgs-fmt}/bin/nixpkgs-fmt --check .
'';
deadnix = pkgs.writeShellScriptBin "deadnix-check" ''
${pkgs.deadnix}/bin/deadnix --fail .
'';
# Ensures that the NUR bot can evaluate and find all our packages.
# Normally we'd also run with `--option restrict-eval true`, but
# this is incompatible with flakes because reasons.
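The onlyAvailable filter keeps only derivations whose meta.platforms lists the evaluating system; a package that declares no meta.platforms at all is dropped too, because the fallback is an empty list. A small sketch with made-up attribute names:

lib.filterAttrs (_: drv: builtins.elem "x86_64-linux" (drv.meta.platforms or [ ]))
  {
    foo = { meta.platforms = [ "x86_64-linux" ]; };    # kept
    bar = { meta.platforms = [ "aarch64-darwin" ]; };  # dropped
    baz = { };                                         # dropped: no meta.platforms
  }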
@ -50,6 +58,8 @@
apps = {
alacritty = mkApp { drv = pkgs.alacritty-ligatures; exePath = "/bin/alacritty"; };
protonmail-bridge = mkApp { drv = pkgs.protonmail-bridge; };
protonmail-bridge-headless = mkApp { drv = pkgs.protonmail-bridge; };
psst-cli = mkApp { drv = pkgs.psst; exePath = "/bin/psst-cli"; };
psst-gui = mkApp { drv = pkgs.psst; exePath = "/bin/psst-gui"; };
samrewritten = mkApp { drv = pkgs.samrewritten; };
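mkApp itself is not part of this hunk; it presumably comes from flake-utils, which this flake already pins (see flake.lock above). Under that assumption each entry expands to roughly:

apps.psst-cli = {
  type = "app";
  program = "${pkgs.psst}/bin/psst-cli";   # drv outPath + exePath
};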


@ -2,9 +2,7 @@
amdgpu-common = ./services/hardware/amdgpu-common.nix;
amdgpu-fan = ./services/hardware/amdgpu-fan.nix;
amdgpu-pwm = ./services/hardware/amdgpu-pwm.nix;
betanin = ./services/web-apps/betanin.nix;
dunst = ./services/x11/dunst.nix;
porkbun-ddns = ./services/networking/porkbun-ddns.nix;
radeon-profile-daemon = ./services/hardware/radeon-profile-daemon.nix;
}


@ -1,74 +0,0 @@
{ config, lib, pkgs, ... }:
let
inherit (lib) mkOption types;
cfg = config.services.porkbun-ddns;
in
{
options = {
services.porkbun-ddns = {
enable = lib.mkEnableOption "Porkbun dynamic DNS client";
package = mkOption {
# TODO: How do I use mkPackageOption when the package isn't in the
# package set?
type = types.package;
default = pkgs.callPackage ../../../pkgs/by-name/po/porkbun-ddns/package.nix { };
defaultText = "pkgs.porkbun-ddns";
description = lib.mdDoc "The porkbun-ddns package to use.";
};
interval = mkOption {
type = types.str;
default = "10m";
description = lib.mdDoc ''
Interval to update dynamic DNS records. The default is to update every
10 minutes. The format is described in {manpage}`systemd.time(7)`.
'';
};
domains = mkOption {
type = types.listOf types.str;
default = [ ];
description = lib.mdDoc "Domains to update.";
};
apiKeyFile = mkOption {
type = types.nullOr types.path;
description = lib.mdDoc ''
File containing the API key to use when running the client.
'';
};
secretApiKeyFile = mkOption {
type = types.nullOr types.path;
description = lib.mdDoc ''
File containing the secret API key to use when running the
client.
'';
};
};
};
config = lib.mkIf cfg.enable {
systemd.services.porkbun-ddns = {
description = "Porkbun dynamic DNS client";
script = ''
${cfg.package}/bin/porkbun-ddns \
-K ${cfg.apiKeyFile} \
-S ${cfg.secretApiKeyFile} \
${lib.concatStringsSep " " cfg.domains}
'';
};
systemd.timers.porkbun-ddns = {
description = "Porkbun dynamic DNS client";
wants = [ "network-online.target" ];
wantedBy = [ "timers.target" ];
timerConfig = {
OnBootSec = cfg.interval;
OnUnitActiveSec = cfg.interval;
};
};
};
}
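For reference, the module removed here was enabled from a NixOS configuration along these lines (the secret paths are hypothetical):

services.porkbun-ddns = {
  enable = true;
  domains = [ "example.com" "www.example.com" ];
  interval = "15m";                                        # systemd.time(7) format
  apiKeyFile = "/run/secrets/porkbun-api-key";
  secretApiKeyFile = "/run/secrets/porkbun-secret-api-key";
};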


@ -1,169 +0,0 @@
{ config, lib, pkgs, ... }:
let
inherit (builtins) hashString;
inherit (lib) mkIf mkOption optionalAttrs types;
cfg = config.services.betanin;
defaultUser = "betanin";
defaultGroup = "betanin";
settingsFormat = pkgs.formats.toml { };
beetsFormat = pkgs.formats.yaml { };
in
{
options = {
services.betanin = {
enable = lib.mkEnableOption "betanin";
package = mkOption {
description = "Package containing betanin program.";
type = types.package;
default = pkgs.betanin or (import ../../.. { inherit pkgs; }).betanin;
};
openFirewall = mkOption {
description = "Open ports in the firewall for the server.";
type = types.bool;
default = false;
};
port = mkOption {
description = "Port to access betanin on.";
type = types.port;
default = 9393;
};
user = mkOption {
description = "User that the betanin program should run under.";
type = types.str;
default = defaultUser;
};
group = mkOption {
description = "Group that the betanin program should run under.";
type = types.str;
default = defaultGroup;
};
dataDir = mkOption {
description = "Directory to store application data.";
type = types.str;
default = "/var/lib/betanin";
};
settings = mkOption {
type = settingsFormat.type;
default = { };
example = lib.literalExpression ''
{
frontend = {
username = "foo";
password = { _secret = "/run/secrets/betaninPasswordFile"; };
};
clients = {
api_key = { _secret = "/run/secrets/betaninApiKeyFile"; };
};
server = {
num_parallel_jobs = 1;
};
}
'';
description = lib.mdDoc ''
Configuration for betanin.
Options containing secret data should be set to an attribute set
containing the attribute `_secret` - a string pointing to a file
containing the value the option should be set to.
'';
};
beets.settings = mkOption {
type = beetsFormat.type;
default = { };
description = lib.mdDoc "Configuration for beets used by betanin.";
};
};
};
config = mkIf cfg.enable {
services.betanin.settings = {
notifications = {
# Required to exist.
services = { };
strings = {
title = lib.mkDefault "[betanin] torrent `$name` $status";
body = lib.mkDefault "@ $time. view/use the console at http://127.0.0.1:${toString cfg.port}/$console_path";
};
};
};
networking.firewall = mkIf cfg.openFirewall {
allowedTCPPorts = [ cfg.port ];
};
systemd.services.betanin =
let
isSecret = v: lib.isAttrs v && v ? _secret && lib.isString v._secret;
sanitisedConfig = lib.mapAttrsRecursiveCond
(as: !isSecret as)
(_: v: if isSecret v then hashString "sha256" v._secret else v)
cfg.settings;
settingsFile = settingsFormat.generate "betanin.toml" sanitisedConfig;
secretPaths = lib.catAttrs "_secret" (lib.collect isSecret cfg.settings);
mkSecretReplacement = file: ''
replace-secret ${hashString "sha256" file} ${file} "${cfg.dataDir}/.config/betanin/config.toml"
'';
secretReplacements = lib.concatMapStrings mkSecretReplacement secretPaths;
beetsFile = beetsFormat.generate "betanin-beets.yaml" cfg.beets.settings;
in
{
description = "Betanin service";
wantedBy = [ "multi-user.target" ];
after = [ "network.target" ];
environment = {
HOME = cfg.dataDir;
};
path = [ pkgs.replace-secret ];
script = ''
mkdir -p ${cfg.dataDir}/.config/betanin \
${cfg.dataDir}/.local/share/betanin \
${cfg.dataDir}/.config/beets
ln -sf ${beetsFile} ${cfg.dataDir}/.config/beets/config.yaml
cat ${settingsFile} > ${cfg.dataDir}/.config/betanin/config.toml
${secretReplacements}
${cfg.package}/bin/betanin --port ${toString cfg.port}
'';
serviceConfig = lib.mkMerge [
{
User = cfg.user;
Group = cfg.group;
PrivateTmp = true;
Restart = "always";
WorkingDirectory = cfg.dataDir;
}
(mkIf (cfg.dataDir == "/var/lib/betanin") {
StateDirectory = "betanin";
})
];
};
users.users = optionalAttrs (cfg.user == defaultUser) {
${cfg.user} = {
isSystemUser = true;
group = cfg.group;
};
};
users.groups = optionalAttrs (cfg.group == defaultGroup) {
${cfg.group} = { };
};
};
}
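The secret handling in the removed module works in two steps: at evaluation time every `_secret` attribute is replaced by the sha256 of the secret's path, so only that placeholder reaches the Nix store; at service start-up, replace-secret swaps each placeholder for the real file contents inside the copied config.toml. For the option from the example above:

# written in the NixOS configuration:
services.betanin.settings.frontend.password = { _secret = "/run/secrets/betaninPasswordFile"; };
# the store-side betanin.toml contains only the placeholder:
#   password = "<sha256 of "/run/secrets/betaninPasswordFile">"
# and the unit script rewrites it before launching betanin:
#   replace-secret <that sha256> /run/secrets/betaninPasswordFile <dataDir>/.config/betanin/config.toml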


@ -0,0 +1,11 @@
diff --git a/include/components/types.hpp b/include/components/types.hpp
index 8125d4b..c435f4a 100644
--- a/include/components/types.hpp
+++ b/include/components/types.hpp
@@ -43,5 +43,6 @@ enum class controltag {
DOUBLE_MIDDLE,
DOUBLE_RIGHT,
+ EXTRA,
// Terminator value, do not use
BTN_COUNT,
};


@ -0,0 +1,44 @@
{ qtbase, go, goModules }:
{
pname = "protonmail-bridge";
tags = "pmapi_prod";
QT_PKG_CONFIG = "true";
QT_VERSION = qtbase.version;
nativeBuildInputs = [
goModules.qt
qtbase
];
buildPhase = ''
cp cmd/Desktop-Bridge/main.go .
## Enable writable vendor
GOMODULE=gomodule
mv vendor $GOMODULE-vendor
mkdir vendor
readarray -t files < <(find $GOMODULE-vendor/ -type f | grep -v github.com/therecipe/qt | sed "s/$GOMODULE-//")
for f in "''${files[@]}"; do
mkdir -p $(dirname $f)
cp -s $PWD/$GOMODULE-$f $f
done
unset GOMODULE
##
mkdir -p vendor/github.com/therecipe
cp -r gomodule-vendor/github.com/therecipe/qt vendor/github.com/therecipe/qt
chmod -R a+w vendor/github.com/therecipe/qt
# Add vendor to GOPATH because fuck
mkdir -p $GOPATH
ln -s $PWD/vendor $GOPATH/src
qtsetup check
GOROOT=${go}/share/go qtdeploy "''${buildFlagsArray[@]}" build desktop
'';
meta.broken = true;
}


@ -0,0 +1,48 @@
{ lib
, fetchFromGitHub
, buildGoModule
, pkg-config
, libsecret
}:
{ pname
, tags
, ...
}@args:
buildGoModule (lib.recursiveUpdate args rec {
inherit pname;
version = "1.8.10";
src = fetchFromGitHub {
owner = "ProtonMail";
repo = "proton-bridge";
rev = "br-${version}";
sha256 = "1na8min9cmn82lpad58abw6837k303fr09l6cvzswaxs73f231ig";
};
vendorSha256 = "1219xa1347877bfhnid15y6w9s4hf1czbrmll2iha4gpsmg066bb";
nativeBuildInputs = (args.nativeBuildInputs or [ ]) ++ [
pkg-config
];
buildInputs = (args.buildInputs or [ ]) ++ [
libsecret
];
inherit tags;
ldflags = [
"-X github.com/ProtonMail/proton-bridge/pkg/constants.Version=${version}"
"-X github.com/ProtonMail/proton-bridge/pkg/constants.Revision=${version}"
"-X github.com/ProtonMail/proton-bridge/pkg/constants.BuildDate=unknown"
];
meta = with lib; {
description = "Integrate ProtonMail paid account with any program that supports IMAP and SMTP";
homepage = "https://protonmail.com";
license = licenses.gpl3;
platforms = platforms.x86_64;
};
})
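Because common.nix applies `lib.recursiveUpdate args { … }` with its own attribute set on the right, the fixed attributes here (version, src, ldflags, and so on) override anything a variant file passes under the same name, while unrelated caller attributes are preserved. The precedence in one line:

lib.recursiveUpdate { a.b = 1; a.c = 2; } { a.b = 3; }   # => { a = { b = 3; c = 2; }; }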


@ -0,0 +1,20 @@
{ lib
, fetchFromGitHub
, buildGoModule
, go
, goModules
, libsecret
, pkg-config
, qtbase
, qtdoc
}:
let
builder = import ./common.nix {
inherit lib fetchFromGitHub buildGoModule libsecret pkg-config;
};
in
{
protonmail-bridge = builder (import ./app.nix { inherit qtbase go goModules; });
protonmail-bridge-headless = builder (import ./headless.nix { });
}


@ -0,0 +1,768 @@
# file generated from go.mod using vgo2nix (https://github.com/adisbladis/vgo2nix)
[
{
goPackagePath = "github.com/0xAX/notificator";
fetch = {
type = "git";
url = "https://github.com/0xAX/notificator";
rev = "3962a5ea8da1";
sha256 = "1lkn6mmghsd4a2h3na8x0r5xgckjn3c0v5vsp6bzhl2k1zxb640p";
};
}
{
goPackagePath = "github.com/BurntSushi/toml";
fetch = {
type = "git";
url = "https://github.com/BurntSushi/toml";
rev = "v0.3.1";
sha256 = "1fjdwwfzyzllgiwydknf1pwjvy49qxfsczqx5gz3y0izs7as99j6";
};
}
{
goPackagePath = "github.com/ProtonMail/go-appdir";
fetch = {
type = "git";
url = "https://github.com/ProtonMail/go-appdir";
rev = "v1.1.0";
sha256 = "1pl43h5f79g2r0z98glrxasbb69yzigpd85csl7zmyhg63vzbkqw";
};
}
{
goPackagePath = "github.com/ProtonMail/go-apple-mobileconfig";
fetch = {
type = "git";
url = "https://github.com/ProtonMail/go-apple-mobileconfig";
rev = "7ea9927a11f6";
sha256 = "1vlm9m54xmq0r8bhgwzdmxnxf97sz01lgi6lw8pcjbljmzb9l75f";
};
}
{
goPackagePath = "github.com/ProtonMail/go-autostart";
fetch = {
type = "git";
url = "https://github.com/ProtonMail/go-autostart";
rev = "c5272053443a";
sha256 = "0cjnsbqrkhlapma2070cqcxr4wkvv55is3byqn8zmkzi0l3217nf";
};
}
{
goPackagePath = "github.com/ProtonMail/go-imap-id";
fetch = {
type = "git";
url = "https://github.com/ProtonMail/go-imap-id";
rev = "ed0baee567ee";
sha256 = "1kljw64pi3kabbd3gi7zy9wqzaypsxrdi5mfwf6akl438bq44jm4";
};
}
{
goPackagePath = "github.com/ProtonMail/go-mime";
fetch = {
type = "git";
url = "https://github.com/ProtonMail/go-mime";
rev = "09454e3dbe72";
sha256 = "1ncca6191nf0lsfhhhdxdbhxg3b2q1cmvjhq2428g0b50dlfkrnn";
};
}
{
goPackagePath = "github.com/ProtonMail/go-vcard";
fetch = {
type = "git";
url = "https://github.com/ProtonMail/go-vcard";
rev = "33aaa0a0c8a5";
sha256 = "19cify6lxd2yirqc92yfgzvn5qlc8a01a2kxjdg83jv0lx6ps26q";
};
}
{
goPackagePath = "github.com/ProtonMail/gopenpgp";
fetch = {
type = "git";
url = "https://github.com/ProtonMail/gopenpgp";
rev = "d398098113ed";
sha256 = "0ry69mymb6q00g4khdbzllrz7b44zhxvdzavhz6936n9dzsay13v";
};
}
{
goPackagePath = "github.com/abiosoft/ishell";
fetch = {
type = "git";
url = "https://github.com/abiosoft/ishell";
rev = "v2.0.0";
sha256 = "11r6l133aaz6khm60x0a410ckpzvqzv2az7z5b088c2vddnp538r";
};
}
{
goPackagePath = "github.com/abiosoft/readline";
fetch = {
type = "git";
url = "https://github.com/abiosoft/readline";
rev = "155bce2042db";
sha256 = "104q8dazj8yf6b089jjr82fy9h1g80zyyzvp3g8b44a7d8ngjj6r";
};
}
{
goPackagePath = "github.com/allan-simon/go-singleinstance";
fetch = {
type = "git";
url = "https://github.com/allan-simon/go-singleinstance";
rev = "79edcfdc2dfc";
sha256 = "06xrxifz5cd98iji7v46zq9xf63zf77sqbr72w1ics3s2hyfkpjz";
};
}
{
goPackagePath = "github.com/andybalholm/cascadia";
fetch = {
type = "git";
url = "https://github.com/andybalholm/cascadia";
rev = "v1.1.0";
sha256 = "1slh68ysbixc21gbni4msrgq971n59gnn2x3ys196jd413a6qf0f";
};
}
{
goPackagePath = "github.com/cention-sany/utf7";
fetch = {
type = "git";
url = "https://github.com/cention-sany/utf7";
rev = "26cad61bd60a";
sha256 = "1jy15ryfcln1iwchrksqyrnyfy41gisymm4f9sr1d73ja029bznm";
};
}
{
goPackagePath = "github.com/certifi/gocertifi";
fetch = {
type = "git";
url = "https://github.com/certifi/gocertifi";
rev = "c7c1fbc02894";
sha256 = "018bsy1vclsdk2kns9f37giabibg3kggk3vpj0yr3dv0k72gzybk";
};
}
{
goPackagePath = "github.com/chzyer/logex";
fetch = {
type = "git";
url = "https://github.com/chzyer/logex";
rev = "v1.1.10";
sha256 = "08pbjj3wx9acavlwyr055isa8a5hnmllgdv5k6ra60l5y1brmlq4";
};
}
{
goPackagePath = "github.com/chzyer/test";
fetch = {
type = "git";
url = "https://github.com/chzyer/test";
rev = "a1ea475d72b1";
sha256 = "0rns2aqk22i9xsgyap0pq8wi4cfaxsri4d9q6xxhhyma8jjsnj2k";
};
}
{
goPackagePath = "github.com/cpuguy83/go-md2man";
fetch = {
type = "git";
url = "https://github.com/cpuguy83/go-md2man";
rev = "f79a8a8ca69d";
sha256 = "0r1f7v475dxxgzqci1mxfliwadcrk86ippflx9n411325l4g3ghv";
};
}
{
goPackagePath = "github.com/cucumber/godog";
fetch = {
type = "git";
url = "https://github.com/cucumber/godog";
rev = "v0.8.1";
sha256 = "00bplmx3r7mjaxyf1ky1mh9ps17p78656xjpkqf3wkfhchssm3ny";
};
}
{
goPackagePath = "github.com/danieljoos/wincred";
fetch = {
type = "git";
url = "https://github.com/danieljoos/wincred";
rev = "v1.0.2";
sha256 = "1ym8mygjrf3rw1qka9irw76b2yisr2l5pq2w581s40yldnbfq4lc";
};
}
{
goPackagePath = "github.com/davecgh/go-spew";
fetch = {
type = "git";
url = "https://github.com/davecgh/go-spew";
rev = "v1.1.1";
sha256 = "0hka6hmyvp701adzag2g26cxdj47g21x6jz4sc6jjz1mn59d474y";
};
}
{
goPackagePath = "github.com/docker/docker-credential-helpers";
fetch = {
type = "git";
url = "https://github.com/ProtonMail/docker-credential-helpers";
rev = "0326642117d8";
sha256 = "02qfw1w6v14kfjlc6slg7hvv2raan5263ivymdn2pjm3hdcgs7lh";
};
}
{
goPackagePath = "github.com/emersion/go-imap";
fetch = {
type = "git";
url = "https://github.com/ProtonMail/go-imap";
rev = "0e686f0e855f";
sha256 = "1gijqy3wq6lbqbrljl4l12w5snji3c9fpvywhk9wyd56mdsy7wyr";
};
}
{
goPackagePath = "github.com/emersion/go-imap-appendlimit";
fetch = {
type = "git";
url = "https://github.com/emersion/go-imap-appendlimit";
rev = "beeb382f2a42";
sha256 = "089kgnryvignl9z5c3fi5bxyc3jl7jgmz6ykhk73n8nqp8kgi43b";
};
}
{
goPackagePath = "github.com/emersion/go-imap-idle";
fetch = {
type = "git";
url = "https://github.com/emersion/go-imap-idle";
rev = "e03ba1e0ed89";
sha256 = "0yavndwl9g3q4j97659kq1mpyc22q27vq0lg0f7v704irmbbkanp";
};
}
{
goPackagePath = "github.com/emersion/go-imap-move";
fetch = {
type = "git";
url = "https://github.com/emersion/go-imap-move";
rev = "88aef42b0f1d";
sha256 = "006l76qxkf7gnb0kc7r9xmxg7nmmwfw8sf5lsg05093rwcg232f4";
};
}
{
goPackagePath = "github.com/emersion/go-imap-quota";
fetch = {
type = "git";
url = "https://github.com/ProtonMail/go-imap-quota";
rev = "20f0ba8904de";
sha256 = "1qvhzsm0wjg0ndk963l9wcmdiyacbrmslhvmg1zhj3k9np41ravv";
};
}
{
goPackagePath = "github.com/emersion/go-imap-specialuse";
fetch = {
type = "git";
url = "https://github.com/emersion/go-imap-specialuse";
rev = "ba031ced6a62";
sha256 = "0f8rxgr4z5a7nphhz6vnsqgjr04wwrvdcnsxp5rl2lh60yvj3wig";
};
}
{
goPackagePath = "github.com/emersion/go-imap-unselect";
fetch = {
type = "git";
url = "https://github.com/emersion/go-imap-unselect";
rev = "1e6dc73ac8fe";
sha256 = "0higgjn41ksgsl11zphljbz690i17swzhfkij51dghjnj9q287ff";
};
}
{
goPackagePath = "github.com/emersion/go-sasl";
fetch = {
type = "git";
url = "https://github.com/emersion/go-sasl";
rev = "430746ea8b9b";
sha256 = "11rhbayr1w3zhpl3q7gd6i15qz97pk0k4xs0n39m91hrgl1fj03c";
};
}
{
goPackagePath = "github.com/emersion/go-smtp";
fetch = {
type = "git";
url = "https://github.com/ProtonMail/go-smtp";
rev = "8261df20d309";
sha256 = "0n6k7xbffzsxfjcc74q21nh8dp5s44v3xh9hrsi7a147n8qm83iq";
};
}
{
goPackagePath = "github.com/emersion/go-textwrapper";
fetch = {
type = "git";
url = "https://github.com/emersion/go-textwrapper";
rev = "d0e65e56babe";
sha256 = "1nw8qpjjbpkz49wd19yg2qsln1dmdfxi83wp2aa819cv6xxf2y7l";
};
}
{
goPackagePath = "github.com/emersion/go-vcard";
fetch = {
type = "git";
url = "https://github.com/emersion/go-vcard";
rev = "8856043f13c5";
sha256 = "1dzw5awqbkf2nc09ynmn3zlylj4n1na96rziv5z8p42b1bmbklwk";
};
}
{
goPackagePath = "github.com/fatih/color";
fetch = {
type = "git";
url = "https://github.com/fatih/color";
rev = "v1.9.0";
sha256 = "086z8ssmr1fn9ba4mqnw7pnccfpys6l5yfhvycv1gdrsk7n27mvs";
};
}
{
goPackagePath = "github.com/flynn-archive/go-shlex";
fetch = {
type = "git";
url = "https://github.com/flynn-archive/go-shlex";
rev = "3f9db97f8568";
sha256 = "1j743lysygkpa2s2gii2xr32j7bxgc15zv4113b0q9jhn676ysia";
};
}
{
goPackagePath = "github.com/getsentry/raven-go";
fetch = {
type = "git";
url = "https://github.com/getsentry/raven-go";
rev = "v0.2.0";
sha256 = "0imfwmsb72168fqandf2lxhzhngf2flxhzaar8hcnnfjv2a291lf";
};
}
{
goPackagePath = "github.com/go-resty/resty";
fetch = {
type = "git";
url = "https://github.com/go-resty/resty";
rev = "v2.2.0";
sha256 = "0khfcq07kq6z2vq0i42an8fd16pl1rc980rlf7c1x4szsf8qcajm";
};
}
{
goPackagePath = "github.com/go-test/deep";
fetch = {
type = "git";
url = "https://github.com/go-test/deep";
rev = "v1.0.2";
sha256 = "1dax5bbp95lvkns0jjwf6l3rqj5q4xxd37whpqa8nyyjcakdxnqd";
};
}
{
goPackagePath = "github.com/gogs/chardet";
fetch = {
type = "git";
url = "https://github.com/gogs/chardet";
rev = "2404f7772561";
sha256 = "1dki2pqhnzcmzlqrq4d4jwknnjxm82xqnmizjjdblb6h98ans1cd";
};
}
{
goPackagePath = "github.com/golang/mock";
fetch = {
type = "git";
url = "https://github.com/golang/mock";
rev = "v1.4.3";
sha256 = "1p37xnja1dgq5ykx24n7wincwz2gahjh71b95p8vpw7ss2g8j8wx";
};
}
{
goPackagePath = "github.com/google/go-cmp";
fetch = {
type = "git";
url = "https://github.com/google/go-cmp";
rev = "v0.4.0";
sha256 = "1x5pvl3fb5sbyng7i34431xycnhmx8xx94gq2n19g6p0vz68z2v2";
};
}
{
goPackagePath = "github.com/gopherjs/gopherjs";
fetch = {
type = "git";
url = "https://github.com/gopherjs/gopherjs";
rev = "3e4dfb77656c";
sha256 = "0bmapn4dskpr4a79kcr5irkw19px4a71ls5gspffxiva7sapgyvw";
};
}
{
goPackagePath = "github.com/hashicorp/errwrap";
fetch = {
type = "git";
url = "https://github.com/hashicorp/errwrap";
rev = "v1.0.0";
sha256 = "0slfb6w3b61xz04r32bi0a1bygc82rjzhqkxj2si2074wynqnr1c";
};
}
{
goPackagePath = "github.com/hashicorp/go-multierror";
fetch = {
type = "git";
url = "https://github.com/hashicorp/go-multierror";
rev = "v1.0.0";
sha256 = "00nyn8llqzbfm8aflr9kwsvpzi4kv8v45c141v88xskxp5xf6z49";
};
}
{
goPackagePath = "github.com/jameskeane/bcrypt";
fetch = {
type = "git";
url = "https://github.com/ProtonMail/bcrypt";
rev = "7509ea014998";
sha256 = "12xi8i4sb6q4h4wd6w1phqpzxpff5c629ard8cnkjp7qmznvcc20";
};
}
{
goPackagePath = "github.com/jaytaylor/html2text";
fetch = {
type = "git";
url = "https://github.com/jaytaylor/html2text";
rev = "61d9dc4d7195";
sha256 = "19cn6k8anx8w2ar8kwza6vlijim2xbj4hqxy1m79y0m386b2hapl";
};
}
{
goPackagePath = "github.com/jhillyerd/enmime";
fetch = {
type = "git";
url = "https://github.com/jhillyerd/enmime";
rev = "v0.8.0";
sha256 = "1wzz3hzf8bnn0wbxznzrfdrs4x7qxdgqpf1xirlsisq8z0r8hn9z";
};
}
{
goPackagePath = "github.com/kardianos/osext";
fetch = {
type = "git";
url = "https://github.com/kardianos/osext";
rev = "2bc1f35cddc0";
sha256 = "1pvrbrvmrf4mx0fxbfaphbzgqgwn8v6lkfk2vyrs0znxrs1xyc5r";
};
}
{
goPackagePath = "github.com/keybase/go-keychain";
fetch = {
type = "git";
url = "https://github.com/keybase/go-keychain";
rev = "86d4642e4ce2";
sha256 = "05prxx2bbcqwk5lwk1gdqsrwy256mp4k4im316h9ar3sh42frha6";
};
}
{
goPackagePath = "github.com/konsorten/go-windows-terminal-sequences";
fetch = {
type = "git";
url = "https://github.com/konsorten/go-windows-terminal-sequences";
rev = "v1.0.2";
sha256 = "09mn209ika7ciy87xf2x31dq5fnqw39jidgaljvmqxwk7ff1hnx7";
};
}
{
goPackagePath = "github.com/logrusorgru/aurora";
fetch = {
type = "git";
url = "https://github.com/logrusorgru/aurora";
rev = "e9ef32dff381";
sha256 = "19laya9dav84miw3d0c9vgiv577wzrhydv5mdiii59sgbd780hhq";
};
}
{
goPackagePath = "github.com/mattn/go-colorable";
fetch = {
type = "git";
url = "https://github.com/mattn/go-colorable";
rev = "v0.1.4";
sha256 = "1yxcz08kminqr1221zxpibnbzfcgs3fafin0z9zqb3gqvf74jywz";
};
}
{
goPackagePath = "github.com/mattn/go-isatty";
fetch = {
type = "git";
url = "https://github.com/mattn/go-isatty";
rev = "v0.0.11";
sha256 = "0h671sv7hfprja495kavazkalkx7xzaqksjh13brcnwq67ijrali";
};
}
{
goPackagePath = "github.com/mattn/go-runewidth";
fetch = {
type = "git";
url = "https://github.com/mattn/go-runewidth";
rev = "v0.0.4";
sha256 = "00b3ssm7wiqln3k54z2wcnxr3k3c7m1ybyhb9h8ixzbzspld0qzs";
};
}
{
goPackagePath = "github.com/miekg/dns";
fetch = {
type = "git";
url = "https://github.com/miekg/dns";
rev = "v1.1.29";
sha256 = "1bkk930cg46w1akii60bqqkgy9h9axlcfnqk252r6w8qqpf4z5hh";
};
}
{
goPackagePath = "github.com/myesui/uuid";
fetch = {
type = "git";
url = "https://github.com/myesui/uuid";
rev = "v1.0.0";
sha256 = "1si8cfjin9dyzprnyvfk8pckbsvg97hzs9hffz91505lpidlb9bc";
};
}
{
goPackagePath = "github.com/nsf/jsondiff";
fetch = {
type = "git";
url = "https://github.com/nsf/jsondiff";
rev = "8443391ee9b6";
sha256 = "1zxk0mzil5x197zaqjld403f3vvzilgvciq1z62zl1c1vbwbpwch";
};
}
{
goPackagePath = "github.com/olekukonko/tablewriter";
fetch = {
type = "git";
url = "https://github.com/olekukonko/tablewriter";
rev = "v0.0.1";
sha256 = "0hh95glg7d2md185r03wn52j2r33jc4zil0qvcrs66ka7bdxi7vj";
};
}
{
goPackagePath = "github.com/pkg/errors";
fetch = {
type = "git";
url = "https://github.com/pkg/errors";
rev = "v0.9.1";
sha256 = "1761pybhc2kqr6v5fm8faj08x9bql8427yqg6vnfv6nhrasx1mwq";
};
}
{
goPackagePath = "github.com/pmezard/go-difflib";
fetch = {
type = "git";
url = "https://github.com/pmezard/go-difflib";
rev = "v1.0.0";
sha256 = "0c1cn55m4rypmscgf0rrb88pn58j3ysvc2d0432dp3c6fqg6cnzw";
};
}
{
goPackagePath = "github.com/russross/blackfriday";
fetch = {
type = "git";
url = "https://github.com/russross/blackfriday";
rev = "v2.0.1";
sha256 = "0nlz7isdd4rgnwzs68499hlwicxz34j2k2a0b8jy0y7ycd2bcr5j";
};
}
{
goPackagePath = "github.com/saintfish/chardet";
fetch = {
type = "git";
url = "https://github.com/saintfish/chardet";
rev = "3af4cd4741ca";
sha256 = "0czh50md64k9lbllayq0asir3174saxb88yzxrh640yhfxd98pcb";
};
}
{
goPackagePath = "github.com/shurcooL/sanitized_anchor_name";
fetch = {
type = "git";
url = "https://github.com/shurcooL/sanitized_anchor_name";
rev = "v1.0.0";
sha256 = "1gv9p2nr46z80dnfjsklc6zxbgk96349sdsxjz05f3z6wb6m5l8f";
};
}
{
goPackagePath = "github.com/sirupsen/logrus";
fetch = {
type = "git";
url = "https://github.com/sirupsen/logrus";
rev = "v1.4.2";
sha256 = "087k2lxrr9p9dh68yw71d05h5g9p5v26zbwd6j7lghinjfaw334x";
};
}
{
goPackagePath = "github.com/skratchdot/open-golang";
fetch = {
type = "git";
url = "https://github.com/skratchdot/open-golang";
rev = "eef842397966";
sha256 = "0n6387csjn024db8wldadsiy8ljz7lk7szl6ls28fcbkax7rw86y";
};
}
{
goPackagePath = "github.com/ssor/bom";
fetch = {
type = "git";
url = "https://github.com/ssor/bom";
rev = "6386211fdfcf";
sha256 = "09g5496ifwqxqclh2iw58plcwcz0sczlnxwqxzwmnl4shdl371ld";
};
}
{
goPackagePath = "github.com/stretchr/objx";
fetch = {
type = "git";
url = "https://github.com/stretchr/objx";
rev = "v0.2.0";
sha256 = "0pcdvakxgddaiwcdj73ra4da05a3q4cgwbpm2w75ycq4kzv8ij8k";
};
}
{
goPackagePath = "github.com/stretchr/testify";
fetch = {
type = "git";
url = "https://github.com/stretchr/testify";
rev = "v1.5.1";
sha256 = "09r89m1wy4cjv2nps1ykp00qjpi0531r07q3s34hr7m6njk4srkl";
};
}
{
goPackagePath = "github.com/therecipe/qt";
fetch = {
type = "git";
url = "https://github.com/therecipe/qt";
rev = "5074eb6d8c41";
sha256 = "1zpil531gjravag76p1032c7wizpskzanymdpc74rs8ixckws9zi";
};
}
{
goPackagePath = "github.com/twinj/uuid";
fetch = {
type = "git";
url = "https://github.com/twinj/uuid";
rev = "v1.0.0";
sha256 = "1si8cfjin9dyzprnyvfk8pckbsvg97hzs9hffz91505lpidlb9bc";
};
}
{
goPackagePath = "github.com/urfave/cli";
fetch = {
type = "git";
url = "https://github.com/urfave/cli";
rev = "v1.22.3";
sha256 = "1lz6i6h4mcb4zqclj2lgy34hwpj1bpkxwzkgrqikfsd8pp9wyq9q";
};
}
{
goPackagePath = "go.etcd.io/bbolt";
fetch = {
type = "git";
url = "https://github.com/etcd-io/bbolt";
rev = "v1.3.3";
sha256 = "0dn0zngks9xiz0rrrb3911f73ghl64z84jsmzai2yfmzqr7cdkqc";
};
}
{
goPackagePath = "golang.org/x/crypto";
fetch = {
type = "git";
url = "https://github.com/ProtonMail/crypto";
rev = "d3d8a14a4d4f";
sha256 = "1w5q5snw1nqsdwqffvk0rlmsqx5b6mpfi45adr2j4lzzpw1ab03a";
};
}
{
goPackagePath = "golang.org/x/mod";
fetch = {
type = "git";
url = "https://go.googlesource.com/mod";
rev = "c90efee705ee";
sha256 = "0i5md645rmcy5z5ij9ng428k9rz4g3k1kjy3blsq1264rn426gdf";
};
}
{
goPackagePath = "golang.org/x/net";
fetch = {
type = "git";
url = "https://go.googlesource.com/net";
rev = "244492dfa37a";
sha256 = "0vm2q44phz4vjnzq9428rjk58c82fxf003whczp7c9ryn9fazh7s";
};
}
{
goPackagePath = "golang.org/x/sync";
fetch = {
type = "git";
url = "https://go.googlesource.com/sync";
rev = "112230192c58";
sha256 = "05i2k43j2d0llq768hg5pf3hb2yhfzp9la1w5wp0rsnnzblr0lfn";
};
}
{
goPackagePath = "golang.org/x/sys";
fetch = {
type = "git";
url = "https://go.googlesource.com/sys";
rev = "33540a1f6037";
sha256 = "0fjcv0vzvi6za0b4xmnk3932pr9f9gczzf03y0kgq3ry9rqg169y";
};
}
{
goPackagePath = "golang.org/x/text";
fetch = {
type = "git";
url = "https://go.googlesource.com/text";
rev = "v0.3.2";
sha256 = "0flv9idw0jm5nm8lx25xqanbkqgfiym6619w575p7nrdh0riqwqh";
};
}
{
goPackagePath = "golang.org/x/tools";
fetch = {
type = "git";
url = "https://go.googlesource.com/tools";
rev = "49a3e744a425";
sha256 = "0xx4gv9wpv36crk7gv7imf5vzzs2mz7zla2q2jkck3xnzff8fw3v";
};
}
{
goPackagePath = "golang.org/x/xerrors";
fetch = {
type = "git";
url = "https://go.googlesource.com/xerrors";
rev = "9bdfabe68543";
sha256 = "1yjfi1bk9xb81lqn85nnm13zz725wazvrx3b50hx19qmwg7a4b0c";
};
}
{
goPackagePath = "gopkg.in/check.v1";
fetch = {
type = "git";
url = "https://gopkg.in/check.v1";
rev = "20d25e280405";
sha256 = "0k1m83ji9l1a7ng8a7v40psbymxasmssbrrhpdv2wl4rhs0nc3np";
};
}
{
goPackagePath = "gopkg.in/stretchr/testify.v1";
fetch = {
type = "git";
url = "https://gopkg.in/stretchr/testify.v1";
rev = "v1.2.2";
sha256 = "0dlszlshlxbmmfxj5hlwgv3r22x0y1af45gn1vd198nvvs3pnvfs";
};
}
{
goPackagePath = "gopkg.in/yaml.v2";
fetch = {
type = "git";
url = "https://gopkg.in/yaml.v2";
rev = "v2.2.2";
sha256 = "01wj12jzsdqlnidpyjssmj0r4yavlqy7dwrg7adqd8dicjc4ncsa";
};
}
{
goPackagePath = "rsc.io/quote";
fetch = {
type = "git";
url = "https://github.com/rsc/quote";
rev = "v3.1.0";
sha256 = "0nvv97hwwrl1mx5gzsbdm1ndnwpg3m7i2jb10ig9wily7zmvki0i";
};
}
{
goPackagePath = "rsc.io/sampler";
fetch = {
type = "git";
url = "https://github.com/rsc/sampler";
rev = "v1.3.0";
sha256 = "0byxk2ynba50py805kcvbvjzh59l1r308i1xgyzpw6lff4xx9xjh";
};
}
]


@ -0,0 +1,17 @@
{}:
{
pname = "protonmail-bridge-headless";
tags = [ "pmapi_prod" "nogui" ];
# REVIEW: Some issue with IMAP tests that probably fail due to network
# sandboxing.
doCheck = false;
# Fix up name.
postInstall = ''
mv $out/bin/Desktop-Bridge $out/bin/protonmail-bridge
mv $out/bin/Import-Export $out/bin/protonmail-import-export
'';
}


@ -1,17 +0,0 @@
# This file has been generated by node2nix 1.11.1. Do not edit!
{pkgs ? import <nixpkgs> {
inherit system;
}, system ? builtins.currentSystem, nodejs ? pkgs."nodejs_18"}:
let
nodeEnv = import ./node-env.nix {
inherit (pkgs) stdenv lib python2 runCommand writeTextFile writeShellScript;
inherit pkgs nodejs;
libtool = if pkgs.stdenv.isDarwin then pkgs.darwin.cctools else null;
};
in
import ./packages.nix {
inherit (pkgs) fetchurl nix-gitignore stdenv lib fetchgit;
inherit nodeEnv;
}


@ -1,21 +0,0 @@
{ pkgs
, stdenv
, src
, version
}:
let
nodeComposition = import ./composition.nix {
inherit pkgs;
inherit (stdenv.hostPlatform) system;
};
in
nodeComposition.package.override {
pname = "betanin";
inherit version;
src = "${src}/betanin_client";
postInstall = ''
PRODUCTION=true npm run-script build
'';
}


@ -1,29 +0,0 @@
#!/usr/bin/env bash
# Requires node2nix and jq
cd "$(dirname "$0")" || exit 1
set -euo pipefail
nix build ..#betanin.src 2>/dev/null
tempDir=$(mktemp -d)
cp result/betanin_client/package.json "$tempDir/"
cp result/betanin_client/package-lock.json "$tempDir/"
pushd "$tempDir"
node2nix \
--input package.json \
--lock package-lock.json \
--output packages.nix \
--composition composition.nix \
--strip-optional-dependencies \
--development \
--nodejs-18
popd
cp "$tempDir"/*.nix .
rm result
rm -rf "$tempDir"


@ -1,689 +0,0 @@
# This file originates from node2nix
{lib, stdenv, nodejs, python2, pkgs, libtool, runCommand, writeTextFile, writeShellScript}:
let
# Workaround to cope with utillinux in Nixpkgs 20.09 and util-linux in Nixpkgs master
utillinux = if pkgs ? utillinux then pkgs.utillinux else pkgs.util-linux;
python = if nodejs ? python then nodejs.python else python2;
# Create a tar wrapper that filters all the 'Ignoring unknown extended header keyword' noise
tarWrapper = runCommand "tarWrapper" {} ''
mkdir -p $out/bin
cat > $out/bin/tar <<EOF
#! ${stdenv.shell} -e
$(type -p tar) "\$@" --warning=no-unknown-keyword --delay-directory-restore
EOF
chmod +x $out/bin/tar
'';
# Function that generates a TGZ file from a NPM project
buildNodeSourceDist =
{ name, version, src, ... }:
stdenv.mkDerivation {
name = "node-tarball-${name}-${version}";
inherit src;
buildInputs = [ nodejs ];
buildPhase = ''
export HOME=$TMPDIR
tgzFile=$(npm pack | tail -n 1) # Hooks to the pack command will add output (https://docs.npmjs.com/misc/scripts)
'';
installPhase = ''
mkdir -p $out/tarballs
mv $tgzFile $out/tarballs
mkdir -p $out/nix-support
echo "file source-dist $out/tarballs/$tgzFile" >> $out/nix-support/hydra-build-products
'';
};
# Common shell logic
installPackage = writeShellScript "install-package" ''
installPackage() {
local packageName=$1 src=$2
local strippedName
local DIR=$PWD
cd $TMPDIR
unpackFile $src
# Make the base dir in which the target dependency resides first
mkdir -p "$(dirname "$DIR/$packageName")"
if [ -f "$src" ]
then
# Figure out what directory has been unpacked
packageDir="$(find . -maxdepth 1 -type d | tail -1)"
# Restore write permissions to make building work
find "$packageDir" -type d -exec chmod u+x {} \;
chmod -R u+w "$packageDir"
# Move the extracted tarball into the output folder
mv "$packageDir" "$DIR/$packageName"
elif [ -d "$src" ]
then
# Get a stripped name (without hash) of the source directory.
# On old nixpkgs it's already set internally.
if [ -z "$strippedName" ]
then
strippedName="$(stripHash $src)"
fi
# Restore write permissions to make building work
chmod -R u+w "$strippedName"
# Move the extracted directory into the output folder
mv "$strippedName" "$DIR/$packageName"
fi
# Change to the package directory to install dependencies
cd "$DIR/$packageName"
}
'';
# Bundle the dependencies of the package
#
# Only include dependencies if they don't exist. They may also be bundled in the package.
includeDependencies = {dependencies}:
lib.optionalString (dependencies != []) (
''
mkdir -p node_modules
cd node_modules
''
+ (lib.concatMapStrings (dependency:
''
if [ ! -e "${dependency.packageName}" ]; then
${composePackage dependency}
fi
''
) dependencies)
+ ''
cd ..
''
);
# Recursively composes the dependencies of a package
composePackage = { name, packageName, src, dependencies ? [], ... }@args:
builtins.addErrorContext "while evaluating node package '${packageName}'" ''
installPackage "${packageName}" "${src}"
${includeDependencies { inherit dependencies; }}
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
'';
pinpointDependencies = {dependencies, production}:
let
pinpointDependenciesFromPackageJSON = writeTextFile {
name = "pinpointDependencies.js";
text = ''
var fs = require('fs');
var path = require('path');
function resolveDependencyVersion(location, name) {
if(location == process.env['NIX_STORE']) {
return null;
} else {
var dependencyPackageJSON = path.join(location, "node_modules", name, "package.json");
if(fs.existsSync(dependencyPackageJSON)) {
var dependencyPackageObj = JSON.parse(fs.readFileSync(dependencyPackageJSON));
if(dependencyPackageObj.name == name) {
return dependencyPackageObj.version;
}
} else {
return resolveDependencyVersion(path.resolve(location, ".."), name);
}
}
}
function replaceDependencies(dependencies) {
if(typeof dependencies == "object" && dependencies !== null) {
for(var dependency in dependencies) {
var resolvedVersion = resolveDependencyVersion(process.cwd(), dependency);
if(resolvedVersion === null) {
process.stderr.write("WARNING: cannot pinpoint dependency: "+dependency+", context: "+process.cwd()+"\n");
} else {
dependencies[dependency] = resolvedVersion;
}
}
}
}
/* Read the package.json configuration */
var packageObj = JSON.parse(fs.readFileSync('./package.json'));
/* Pinpoint all dependencies */
replaceDependencies(packageObj.dependencies);
if(process.argv[2] == "development") {
replaceDependencies(packageObj.devDependencies);
}
else {
packageObj.devDependencies = {};
}
replaceDependencies(packageObj.optionalDependencies);
replaceDependencies(packageObj.peerDependencies);
/* Write the fixed package.json file */
fs.writeFileSync("package.json", JSON.stringify(packageObj, null, 2));
'';
};
in
''
node ${pinpointDependenciesFromPackageJSON} ${if production then "production" else "development"}
${lib.optionalString (dependencies != [])
''
if [ -d node_modules ]
then
cd node_modules
${lib.concatMapStrings (dependency: pinpointDependenciesOfPackage dependency) dependencies}
cd ..
fi
''}
'';
# Recursively traverses all dependencies of a package and pinpoints all
# dependencies in the package.json file to the versions that are actually
# being used.
pinpointDependenciesOfPackage = { packageName, dependencies ? [], production ? true, ... }@args:
''
if [ -d "${packageName}" ]
then
cd "${packageName}"
${pinpointDependencies { inherit dependencies production; }}
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
fi
'';
# Extract the Node.js source code which is used to compile packages with
# native bindings
nodeSources = runCommand "node-sources" {} ''
tar --no-same-owner --no-same-permissions -xf ${nodejs.src}
mv node-* $out
'';
# Script that adds _integrity fields to all package.json files to prevent NPM from consulting the cache (that is empty)
addIntegrityFieldsScript = writeTextFile {
name = "addintegrityfields.js";
text = ''
var fs = require('fs');
var path = require('path');
function augmentDependencies(baseDir, dependencies) {
for(var dependencyName in dependencies) {
var dependency = dependencies[dependencyName];
// Open package.json and augment metadata fields
var packageJSONDir = path.join(baseDir, "node_modules", dependencyName);
var packageJSONPath = path.join(packageJSONDir, "package.json");
if(fs.existsSync(packageJSONPath)) { // Only augment packages that exist. Sometimes we may have production installs in which development dependencies can be ignored
console.log("Adding metadata fields to: "+packageJSONPath);
var packageObj = JSON.parse(fs.readFileSync(packageJSONPath));
if(dependency.integrity) {
packageObj["_integrity"] = dependency.integrity;
} else {
packageObj["_integrity"] = "sha1-000000000000000000000000000="; // When no _integrity string has been provided (e.g. by Git dependencies), add a dummy one. It does not seem to harm and it bypasses downloads.
}
if(dependency.resolved) {
packageObj["_resolved"] = dependency.resolved; // Adopt the resolved property if one has been provided
} else {
packageObj["_resolved"] = dependency.version; // Set the resolved version to the version identifier. This prevents NPM from cloning Git repositories.
}
if(dependency.from !== undefined) { // Adopt from property if one has been provided
packageObj["_from"] = dependency.from;
}
fs.writeFileSync(packageJSONPath, JSON.stringify(packageObj, null, 2));
}
// Augment transitive dependencies
if(dependency.dependencies !== undefined) {
augmentDependencies(packageJSONDir, dependency.dependencies);
}
}
}
if(fs.existsSync("./package-lock.json")) {
var packageLock = JSON.parse(fs.readFileSync("./package-lock.json"));
if(![1, 2].includes(packageLock.lockfileVersion)) {
process.stderr.write("Sorry, I only understand lock file versions 1 and 2!\n");
process.exit(1);
}
if(packageLock.dependencies !== undefined) {
augmentDependencies(".", packageLock.dependencies);
}
}
'';
};
# Reconstructs a package-lock file from the node_modules/ folder structure and package.json files with dummy sha1 hashes
reconstructPackageLock = writeTextFile {
name = "reconstructpackagelock.js";
text = ''
var fs = require('fs');
var path = require('path');
var packageObj = JSON.parse(fs.readFileSync("package.json"));
var lockObj = {
name: packageObj.name,
version: packageObj.version,
lockfileVersion: 2,
requires: true,
packages: {
"": {
name: packageObj.name,
version: packageObj.version,
license: packageObj.license,
bin: packageObj.bin,
dependencies: packageObj.dependencies,
engines: packageObj.engines,
optionalDependencies: packageObj.optionalDependencies
}
},
dependencies: {}
};
function augmentPackageJSON(filePath, packages, dependencies) {
var packageJSON = path.join(filePath, "package.json");
if(fs.existsSync(packageJSON)) {
var packageObj = JSON.parse(fs.readFileSync(packageJSON));
packages[filePath] = {
version: packageObj.version,
integrity: "sha1-000000000000000000000000000=",
dependencies: packageObj.dependencies,
engines: packageObj.engines,
optionalDependencies: packageObj.optionalDependencies
};
dependencies[packageObj.name] = {
version: packageObj.version,
integrity: "sha1-000000000000000000000000000=",
dependencies: {}
};
processDependencies(path.join(filePath, "node_modules"), packages, dependencies[packageObj.name].dependencies);
}
}
function processDependencies(dir, packages, dependencies) {
if(fs.existsSync(dir)) {
var files = fs.readdirSync(dir);
files.forEach(function(entry) {
var filePath = path.join(dir, entry);
var stats = fs.statSync(filePath);
if(stats.isDirectory()) {
if(entry.substr(0, 1) == "@") {
// When we encounter a namespace folder, augment all packages belonging to the scope
var pkgFiles = fs.readdirSync(filePath);
pkgFiles.forEach(function(entry) {
if(stats.isDirectory()) {
var pkgFilePath = path.join(filePath, entry);
augmentPackageJSON(pkgFilePath, packages, dependencies);
}
});
} else {
augmentPackageJSON(filePath, packages, dependencies);
}
}
});
}
}
processDependencies("node_modules", lockObj.packages, lockObj.dependencies);
fs.writeFileSync("package-lock.json", JSON.stringify(lockObj, null, 2));
'';
};
# Script that links bins defined in package.json to the node_modules bin directory
# NPM does not do this for top-level packages itself anymore as of v7
linkBinsScript = writeTextFile {
name = "linkbins.js";
text = ''
var fs = require('fs');
var path = require('path');
var packageObj = JSON.parse(fs.readFileSync("package.json"));
var nodeModules = Array(packageObj.name.split("/").length).fill("..").join(path.sep);
if(packageObj.bin !== undefined) {
fs.mkdirSync(path.join(nodeModules, ".bin"))
if(typeof packageObj.bin == "object") {
Object.keys(packageObj.bin).forEach(function(exe) {
if(fs.existsSync(packageObj.bin[exe])) {
console.log("linking bin '" + exe + "'");
fs.symlinkSync(
path.join("..", packageObj.name, packageObj.bin[exe]),
path.join(nodeModules, ".bin", exe)
);
}
else {
console.log("skipping non-existent bin '" + exe + "'");
}
})
}
else {
if(fs.existsSync(packageObj.bin)) {
console.log("linking bin '" + packageObj.bin + "'");
fs.symlinkSync(
path.join("..", packageObj.name, packageObj.bin),
path.join(nodeModules, ".bin", packageObj.name.split("/").pop())
);
}
else {
console.log("skipping non-existent bin '" + packageObj.bin + "'");
}
}
}
else if(packageObj.directories !== undefined && packageObj.directories.bin !== undefined) {
fs.mkdirSync(path.join(nodeModules, ".bin"))
fs.readdirSync(packageObj.directories.bin).forEach(function(exe) {
if(fs.existsSync(path.join(packageObj.directories.bin, exe))) {
console.log("linking bin '" + exe + "'");
fs.symlinkSync(
path.join("..", packageObj.name, packageObj.directories.bin, exe),
path.join(nodeModules, ".bin", exe)
);
}
else {
console.log("skipping non-existent bin '" + exe + "'");
}
})
}
'';
};
prepareAndInvokeNPM = {packageName, bypassCache, reconstructLock, npmFlags, production}:
let
forceOfflineFlag = if bypassCache then "--offline" else "--registry http://www.example.com";
in
''
# Pinpoint the versions of all dependencies to the ones that are actually being used
echo "pinpointing versions of dependencies..."
source $pinpointDependenciesScriptPath
# Patch the shebangs of the bundled modules to prevent them from
# calling executables outside the Nix store as much as possible
patchShebangs .
# Deploy the Node.js package by running npm install. Since the
# dependencies have been provided already by ourselves, it should not
# attempt to install them again, which is good, because we want to make
# it Nix's responsibility. If it needs to install any dependencies
# anyway (e.g. because the dependency parameters are
# incomplete/incorrect), it fails.
#
# The other responsibilities of NPM are kept -- version checks, build
# steps, postprocessing etc.
export HOME=$TMPDIR
cd "${packageName}"
runHook preRebuild
${lib.optionalString bypassCache ''
${lib.optionalString reconstructLock ''
if [ -f package-lock.json ]
then
echo "WARNING: Reconstruct lock option enabled, but a lock file already exists!"
echo "This will most likely result in version mismatches! We will remove the lock file and regenerate it!"
rm package-lock.json
else
echo "No package-lock.json file found, reconstructing..."
fi
node ${reconstructPackageLock}
''}
node ${addIntegrityFieldsScript}
''}
npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production "--production"} rebuild
runHook postRebuild
if [ "''${dontNpmInstall-}" != "1" ]
then
# NPM tries to download packages even when they already exist if npm-shrinkwrap is used.
rm -f npm-shrinkwrap.json
npm ${forceOfflineFlag} --nodedir=${nodeSources} --no-bin-links --ignore-scripts ${npmFlags} ${lib.optionalString production "--production"} install
fi
# Link executables defined in package.json
node ${linkBinsScript}
'';
# Builds and composes an NPM package including all its dependencies
buildNodePackage =
{ name
, packageName
, version ? null
, dependencies ? []
, buildInputs ? []
, production ? true
, npmFlags ? ""
, dontNpmInstall ? false
, bypassCache ? false
, reconstructLock ? false
, preRebuild ? ""
, dontStrip ? true
, unpackPhase ? "true"
, buildPhase ? "true"
, meta ? {}
, ... }@args:
let
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "preRebuild" "unpackPhase" "buildPhase" "meta" ];
in
stdenv.mkDerivation ({
name = "${name}${if version == null then "" else "-${version}"}";
buildInputs = [ tarWrapper python nodejs ]
++ lib.optional (stdenv.isLinux) utillinux
++ lib.optional (stdenv.isDarwin) libtool
++ buildInputs;
inherit nodejs;
inherit dontStrip; # Stripping may fail a build for some package deployments
inherit dontNpmInstall preRebuild unpackPhase buildPhase;
compositionScript = composePackage args;
pinpointDependenciesScript = pinpointDependenciesOfPackage args;
passAsFile = [ "compositionScript" "pinpointDependenciesScript" ];
installPhase = ''
source ${installPackage}
# Create and enter a root node_modules/ folder
mkdir -p $out/lib/node_modules
cd $out/lib/node_modules
# Compose the package and all its dependencies
source $compositionScriptPath
${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}
# Create symlink to the deployed executable folder, if applicable
if [ -d "$out/lib/node_modules/.bin" ]
then
ln -s $out/lib/node_modules/.bin $out/bin
# Fixup all executables
ls $out/bin/* | while read i
do
file="$(readlink -f "$i")"
chmod u+rwx "$file"
if isScript "$file"
then
sed -i 's/\r$//' "$file" # convert crlf to lf
fi
done
fi
# Create symlinks to the deployed manual page folders, if applicable
if [ -d "$out/lib/node_modules/${packageName}/man" ]
then
mkdir -p $out/share
for dir in "$out/lib/node_modules/${packageName}/man/"*
do
mkdir -p $out/share/man/$(basename "$dir")
for page in "$dir"/*
do
ln -s $page $out/share/man/$(basename "$dir")
done
done
fi
# Run post install hook, if provided
runHook postInstall
'';
meta = {
# default to Node.js' platforms
platforms = nodejs.meta.platforms;
} // meta;
} // extraArgs);
# Builds a node environment (a node_modules folder and a set of binaries)
buildNodeDependencies =
{ name
, packageName
, version ? null
, src
, dependencies ? []
, buildInputs ? []
, production ? true
, npmFlags ? ""
, dontNpmInstall ? false
, bypassCache ? false
, reconstructLock ? false
, dontStrip ? true
, unpackPhase ? "true"
, buildPhase ? "true"
, ... }@args:
let
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" ];
in
stdenv.mkDerivation ({
name = "node-dependencies-${name}${if version == null then "" else "-${version}"}";
buildInputs = [ tarWrapper python nodejs ]
++ lib.optional (stdenv.isLinux) utillinux
++ lib.optional (stdenv.isDarwin) libtool
++ buildInputs;
inherit dontStrip; # Stripping may fail a build for some package deployments
inherit dontNpmInstall unpackPhase buildPhase;
includeScript = includeDependencies { inherit dependencies; };
pinpointDependenciesScript = pinpointDependenciesOfPackage args;
passAsFile = [ "includeScript" "pinpointDependenciesScript" ];
installPhase = ''
source ${installPackage}
mkdir -p $out/${packageName}
cd $out/${packageName}
source $includeScriptPath
# Create fake package.json to make the npm commands work properly
cp ${src}/package.json .
chmod 644 package.json
${lib.optionalString bypassCache ''
if [ -f ${src}/package-lock.json ]
then
cp ${src}/package-lock.json .
chmod 644 package-lock.json
fi
''}
# Go to the parent folder to make sure that all packages are pinpointed
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}
# Expose the executables that were installed
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
mv ${packageName} lib
ln -s $out/lib/node_modules/.bin $out/bin
'';
} // extraArgs);
# Builds a development shell
buildNodeShell =
{ name
, packageName
, version ? null
, src
, dependencies ? []
, buildInputs ? []
, production ? true
, npmFlags ? ""
, dontNpmInstall ? false
, bypassCache ? false
, reconstructLock ? false
, dontStrip ? true
, unpackPhase ? "true"
, buildPhase ? "true"
, ... }@args:
let
nodeDependencies = buildNodeDependencies args;
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "unpackPhase" "buildPhase" ];
in
stdenv.mkDerivation ({
name = "node-shell-${name}${if version == null then "" else "-${version}"}";
buildInputs = [ python nodejs ] ++ lib.optional (stdenv.isLinux) utillinux ++ buildInputs;
buildCommand = ''
mkdir -p $out/bin
cat > $out/bin/shell <<EOF
#! ${stdenv.shell} -e
$shellHook
exec ${stdenv.shell}
EOF
chmod +x $out/bin/shell
'';
# Provide the dependencies in a development shell through the NODE_PATH environment variable
inherit nodeDependencies;
shellHook = lib.optionalString (dependencies != []) ''
export NODE_PATH=${nodeDependencies}/lib/node_modules
export PATH="${nodeDependencies}/bin:$PATH"
'';
} // extraArgs);
in
{
buildNodeSourceDist = lib.makeOverridable buildNodeSourceDist;
buildNodePackage = lib.makeOverridable buildNodePackage;
buildNodeDependencies = lib.makeOverridable buildNodeDependencies;
buildNodeShell = lib.makeOverridable buildNodeShell;
}

File diff suppressed because it is too large.


@ -1,71 +0,0 @@
{ lib
, callPackage
, fetchFromGitHub
, python3
, beets
}:
let
py = python3.pkgs;
version = "0.4.0.ffe45c02";
src = fetchFromGitHub {
owner = "xeals";
repo = "betanin";
rev = "ffe45c028037fc1659f62a9cdc9e1413dc2f358d";
hash = "sha256-5d8Y7PDlhkdVRVX+KvpiQ2WYNRELwc+ya5s4Qi+YQpI=";
};
client = callPackage ./client {
inherit src version;
};
in
py.buildPythonApplication {
pname = "betanin";
inherit version src;
clientDistDir = "${client}/lib/node_modules/betanin/dist/";
doCheck = false;
patches = [ ./paths.patch ];
postPatch = ''
export libPrefix="${python3.libPrefix}"
substituteAllInPlace betanin/paths.py
'';
propagatedBuildInputs =
(builtins.attrValues {
inherit (py)
apprise
alembic
click
flask
flask-cors
flask-jwt-extended
flask_migrate
flask-restx
flask-socketio
flask-sqlalchemy
gevent
pyxdg
loguru
ptyprocess
python-engineio
python-socketio
sqlalchemy
sqlalchemy-utils
toml;
}) ++ [
beets
];
meta = {
homepage = "https://github.com/sentriz/betanin";
description = "beets based mitm of your torrent client and music player";
license = lib.licenses.gpl3Only;
maintainers = [ ];
platforms = python3.meta.platforms;
};
}
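The propagatedBuildInputs expression above uses a small idiom: `builtins.attrValues { inherit (py) …; }` simply selects a list of packages from the Python package set by name, for example:

builtins.attrValues { inherit (py) click flask; }   # == [ py.click py.flask ]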


@ -1,24 +0,0 @@
--- a/betanin/paths.py
+++ b/betanin/paths.py
@@ -19,11 +19,3 @@
BEETS_DIR = xdg.BaseDirectory.save_config_path("beets")
-CLIENT_DIST_DIR = _first_existing(
- os.path.join(os.getcwd(), "betanin_client", "dist"),
- os.path.join(site.getusersitepackages(), "betanin_client", "dist"),
- os.path.join(site.getsitepackages()[0], "betanin_client", "dist"),
-)
-MIGRATIONS_DIR = _first_existing(
- os.path.join(os.getcwd(), "betanin_migrations"),
- os.path.join(site.getusersitepackages(), "betanin_migrations"),
- os.path.join(site.getsitepackages()[0], "betanin_migrations"),
-)
+CLIENT_DIST_DIR = "@clientDistDir@"
+MIGRATIONS_DIR = "@out@/lib/@libPrefix@/site-packages/betanin_migrations"
@@ -38,6 +38,2 @@
CONFIG_PATH = os.path.join(CONFIG_DIR, "config.toml")
-VERSION_PATH = _first_existing(
- os.path.join(os.getcwd(), "betanin/version.txt"),
- os.path.join(site.getusersitepackages(), "betanin", "version.txt"),
- os.path.join(site.getsitepackages()[0], "betanin", "version.txt"),
-)
+VERSION_PATH = "@out@/lib/@libPrefix@/site-packages/betanin/version.txt"


@ -1,94 +0,0 @@
{ lib
, stdenv
, fetchurl
, writeScriptBin
, appimageTools
, copyDesktopItems
, makeDesktopItem
}:
let
pname = "cura5";
version = "5.5.0";
name = "${pname}-${version}";
cura5 = appimageTools.wrapType2 {
inherit pname version;
src = fetchurl {
url = "https://github.com/Ultimaker/Cura/releases/download/${version}/Ultimaker-Cura-${version}-linux-X64.AppImage";
hash = "sha256-EG5LMiDFUSXFbtRhd15egPkbp12kEp2TdUdLssSy7Jg=";
};
extraPkgs = _: [ ];
};
script = writeScriptBin pname ''
#!${stdenv.shell}
# AppImage version of Cura loses the current working directory and treats all paths relative to $HOME.
# So we convert each of the files passed as argument to an absolute path.
# This fixes use cases like `cd /path/to/my/files; cura mymodel.stl anothermodel.stl`.
args=()
for a in "$@"; do
if [ -e "$a" ]; then
a="$(realpath "$a")"
fi
args+=("$a")
done
exec "${cura5}/bin/${name}" "''${args[@]}"
'';
in
stdenv.mkDerivation rec {
inherit pname version;
dontUnpack = true;
nativeBuildInputs = [ copyDesktopItems ];
desktopItems = [
# Based on upstream.
# https://github.com/Ultimaker/Cura/blob/main/packaging/AppImage/cura.desktop.jinja
(makeDesktopItem {
name = "cura";
desktopName = "UltiMaker Cura";
genericName = "3D Printing Software";
comment = meta.longDescription;
exec = "cura5";
icon = "cura-icon";
terminal = false;
type = "Application";
mimeTypes = [
"model/stl"
"application/vnd.ms-3mfdocument"
"application/prs.wavefront-obj"
"image/bmp"
"image/gif"
"image/jpeg"
"image/png"
"text/x-gcode"
"application/x-amf"
"application/x-ply"
"application/x-ctm"
"model/vnd.collada+xml"
"model/gltf-binary"
"model/gltf+json"
"model/vnd.collada+xml+zip"
];
categories = [ "Graphics" ];
keywords = [ "3D" "Printing" ];
})
];
# TODO: Extract cura-icon from AppImage source.
installPhase = ''
runHook preInstall
mkdir -p $out/bin
cp ${script}/bin/cura5 $out/bin/cura5
runHook postInstall
'';
meta = {
description = "3D printing software";
homepage = "https://github.com/ultimaker/cura";
longDescription = ''
Cura converts 3D models into paths for a 3D printer. It prepares your print for maximum accuracy, minimum printing time and good reliability with many extra features that make your print come out great.
'';
license = lib.licenses.lgpl3;
platforms = [ "x86_64-linux" ];
};
}

View File

@ -1,25 +0,0 @@
{ lib
, stdenv
, python3
}:
let
python = python3.withPackages (py: [ py.requests ]);
in
stdenv.mkDerivation {
name = "porkbun-ddns";
src = ./.;
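# Exposed as a derivation attribute so substituteAllInPlace can fill in the
# @python@ shebang of porkbun-ddns.py during installPhase.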
inherit python;
installPhase = ''
mkdir -p $out/bin
install -Dm0755 $src/porkbun-ddns.py $out/bin/porkbun-ddns
substituteAllInPlace $out/bin/porkbun-ddns
'';
meta = {
description = "Porkbun dynamic DNS script";
license = lib.licenses.gpl3;
platforms = python.meta.platforms;
};
}

View File

@ -1,176 +0,0 @@
#!@python@/bin/python
import argparse
import json
import os
import re
import requests
from dataclasses import dataclass, fields as datafields
from enum import Enum, unique
from typing import List, Optional
APIBASE = "https://porkbun.com/api/json/v3/dns"
def dataclass_from_dict(klass: object, d: dict):
try:
fieldtypes = {f.name: f.type for f in datafields(klass)}
return klass(**{f: dataclass_from_dict(fieldtypes[f], d[f]) for f in d})
except:
return d # Not a dataclass field
def remove_domain(domain: str, name: str):
return re.sub(f"\\.?{domain}$", "", name)
@unique
class RecordType(Enum):
a = "A"
aaaa = "AAAA"
cname = "CNAME"
mx = "MX"
srv = "SRV"
txt = "TXT"
@dataclass
class Record:
id: str
name: str
type: str
content: str
ttl: str
prio: str = ""
notes: str = ""
@dataclass
class Retrieval:
status: str
records: List[Record]
class ApiError(Exception):
pass
class ArgumentError(Exception):
pass
class PorkbunClient:
def __init__(self, apikey: str, secretapikey: str):
self.apikey = apikey
self.secretapikey = secretapikey
def _make_payload(self, **kwargs):
return json.dumps(
{"apikey": self.apikey, "secretapikey": self.secretapikey, **kwargs}
)
def edit_record(
self,
domain: str,
record: Record,
name: Optional[str] = None,
type: Optional[RecordType] = None,
content: Optional[str] = None,
ttl: Optional[int] = None,
priority: Optional[str] = None,
) -> bool:
return self.edit(
domain,
record.id,
name=name or record.name,
type=type or RecordType(record.type),
content=content or record.content,
ttl=ttl or record.ttl,
priority=priority or record.prio,
)
def edit(
self,
domain: str,
id: str,
name: str,
type: RecordType,
content: str,
ttl: int = 300,
priority: Optional[str] = None,
) -> bool:
# The API returns the fully-qualified record name rather than just the prefix, so strip the domain
name = remove_domain(domain, name)
payload = self._make_payload(
name=name, type=type.value, content=content, ttl=str(ttl), prio=priority
)
res = requests.post(f"{APIBASE}/edit/{domain}/{id}", data=payload)
body = res.json()
if body["status"] != "SUCCESS":
raise ApiError(body["message"])
return True
def delete(self, domain: str, id: str) -> bool:
payload = self._make_payload()
res = requests.post(f"{APIBASE}/delete/{domain}/{id}", data=payload)
body = res.json()
if body["status"] != "SUCCESS":
raise ApiError(body["message"])
return True
def retrieve(self, domain: str) -> List[Record]:
payload = self._make_payload()
res = requests.post(f"{APIBASE}/retrieve/{domain}", data=payload)
body = res.json()
if body["status"] != "SUCCESS":
raise ApiError(body["message"])
return [dataclass_from_dict(Record, d) for d in body["records"]]
def current_ip() -> str:
return requests.get("https://ifconfig.me").text
def _load_key(key: Optional[str], keyfile: Optional[str]) -> str:
if keyfile is not None:
with open(keyfile) as f:
return f.read().strip()
if key is not None:
return key
raise ArgumentError("key or key file is required")
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Wrapper around Porkbun DNS API")
keyarg = parser.add_mutually_exclusive_group(required=True)
keyarg.add_argument("-k", "--key", metavar="KEY", type=str, help="API key")
keyarg.add_argument(
"-K", "--key-file", metavar="FILE", type=str, help="API key file"
)
secretarg = parser.add_mutually_exclusive_group(required=True)
secretarg.add_argument(
"-s", "--secret", metavar="SECRET", type=str, help="secret API key"
)
secretarg.add_argument(
"-S", "--secret-file", metavar="FILE", type=str, help="secret API key file"
)
parser.add_argument("domains", type=str, nargs="+", help="domain(s) to update")
args = parser.parse_args()
try:
apikey = _load_key(args.key, args.key_file)
secretapikey = _load_key(args.secret, args.secret_file)
except Exception as e:
print("error: " + str(e))
parser.print_help()
exit(1)
current_ip = current_ip()
client = PorkbunClient(apikey, secretapikey)
for domain in args.domains:
recs = client.retrieve(domain)
arecs = [r for r in recs if r.type == RecordType.a.value]
for arec in arecs:
if arec.content != current_ip:
client.edit_record(domain, arec, content=current_ip)
print(f"Pointed '{arec.name}' to {current_ip}")

View File

@ -1,73 +0,0 @@
{ lib
, stdenvNoCC
, fetchFromGitHub
}:
let
mkMonaspace =
{ pname
, variants ? [ ]
}: stdenvNoCC.mkDerivation rec {
inherit pname;
version = "1.000";
src = fetchFromGitHub {
owner = "githubnext";
repo = "monaspace";
rev = "v${version}";
hash = "sha256-Zo56r0QoLwxwGQtcWP5cDlasx000G9BFeGINvvwEpQs=";
};
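# Strip spaces so e.g. "Monaspace Argon" matches the MonaspaceArgon-*.otf and
# MonaspaceArgonVar*.ttf file names selected in installPhase.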
_variants = map (builtins.replaceStrings [ " " ] [ "" ]) variants;
installPhase = ''
local out_font=$out/share/fonts/monaspace
'' + (if variants == [ ] then ''
install -m444 -Dt $out_font fonts/otf/*.otf
install -m444 -Dt $out_font fonts/variable/*.ttf
'' else ''
for variant in $_variants; do
install -m444 -Dt $out_font fonts/otf/"$variant"-*.otf
install -m444 -Dt $out_font fonts/variable/"$variant"Var*.ttf
done
'');
meta = {
description = "An innovative superfamily of fonts for code";
homepage = "https://monaspace.githubnext.com/";
longDescription = ''
Since the earliest days of the teletype machine, code has been set in
monospaced typefaces, on a grid. Monaspace is a new type system
that advances the state of the art for the display of code on screen.
'';
license = lib.licenses.ofl;
platforms = lib.platforms.all;
};
};
in
{
monaspace = mkMonaspace {
pname = "monaspace";
};
monaspace-argon = mkMonaspace {
pname = "monaspace-argon";
variants = [ "Monaspace Argon" ];
};
monaspace-krypton = mkMonaspace {
pname = "monaspace-krypton";
variants = [ "Monaspace Krypton" ];
};
monaspace-neon = mkMonaspace {
pname = "monaspace-neon";
variants = [ "Monaspace Neon" ];
};
monaspace-radon = mkMonaspace {
pname = "monaspace-radon";
variants = [ "Monaspace Radon" ];
};
monaspace-xenon = mkMonaspace {
pname = "monaspace-xenon";
variants = [ "Monaspace Xenon" ];
};
}

View File

@ -0,0 +1,68 @@
{ lib
, fetchFromGitHub
, buildGoModule
, makeWrapper
, go
, pkg-config
, qmake
, removeReferencesTo
}:
buildGoModule rec {
pname = "go-qt";
version = "20200904.gc0c124a";
src = fetchFromGitHub {
owner = "therecipe";
repo = "qt";
rev = "c0c124a5770d357908f16fa57e0aa0ec6ccd3f91";
sha256 = "197wdh2v0g5g2dpb1gcd5gp0g4wqzip34cawisvy6z7mygmsc8rd";
};
# fails with `GOFLAGS=-mod=vendor -trimpath`
allowGoReference = true;
preFixup = ''
find $out -type f -exec ${removeReferencesTo}/bin/remove-references-to -t ${go} '{}' +
'';
vendorSha256 = "00wghn93xz240ddj47b8mkbx3cg7c0486igp6vv0x9r6ylhywsm6";
subPackages = [ "cmd/..." ];
nativeBuildInputs = [ makeWrapper ];
# Fixes inconsistent vendoring.
postPatch = ''
cat <<EOM >>go.mod
require (
github.com/therecipe/env_darwin_amd64_513 v0.0.0-20190626001412-d8e92e8db4d0
github.com/therecipe/env_linux_amd64_513 v0.0.0-20190626000307-e137a3934da6
github.com/therecipe/env_windows_amd64_513 v0.0.0-20190626000028-79ec8bd06fb2
github.com/therecipe/env_windows_amd64_513/Tools v0.0.0-20190626000028-79ec8bd06fb2
github.com/therecipe/qt/internal/binding/files/docs/5.12.0 v0.0.0-20200904063919-c0c124a5770d
github.com/therecipe/qt/internal/binding/files/docs/5.13.0 v0.0.0-20200904063919-c0c124a5770d
)
EOM
'';
doCheck = true;
checkPhase = ''
$GOPATH/bin/qtsetup test
'';
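# Put pkg-config and qmake on PATH for the wrapped tools so they can resolve
# Qt via pkg-config (QT_PKG_CONFIG=true).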
postFixup = ''
for bin in $out/bin/*; do
wrapProgram $bin \
--set QT_PKG_CONFIG true \
--prefix PATH : ${pkg-config}/bin \
--prefix PATH : ${qmake}/bin
done
'';
meta = with lib; {
homepage = "https://github.com/therecipe/qt";
description = "Qt bindings for Go";
license = licenses.lgpl3;
broken = true;
};
}

View File

@ -0,0 +1,111 @@
# file generated from go.mod using vgo2nix (https://github.com/adisbladis/vgo2nix)
[
{
goPackagePath = "github.com/davecgh/go-spew";
fetch = {
type = "git";
url = "https://github.com/davecgh/go-spew";
rev = "v1.1.1";
sha256 = "0hka6hmyvp701adzag2g26cxdj47g21x6jz4sc6jjz1mn59d474y";
};
}
{
goPackagePath = "github.com/gopherjs/gopherjs";
fetch = {
type = "git";
url = "https://github.com/gopherjs/gopherjs";
rev = "bd77b112433e";
sha256 = "0bsj69jb0glfvqhqld4c78840iagd0iy2b92878i5w4sgp40k7i1";
};
}
{
goPackagePath = "github.com/konsorten/go-windows-terminal-sequences";
fetch = {
type = "git";
url = "https://github.com/konsorten/go-windows-terminal-sequences";
rev = "v1.0.2";
sha256 = "09mn209ika7ciy87xf2x31dq5fnqw39jidgaljvmqxwk7ff1hnx7";
};
}
{
goPackagePath = "github.com/pmezard/go-difflib";
fetch = {
type = "git";
url = "https://github.com/pmezard/go-difflib";
rev = "v1.0.0";
sha256 = "0c1cn55m4rypmscgf0rrb88pn58j3ysvc2d0432dp3c6fqg6cnzw";
};
}
{
goPackagePath = "github.com/sirupsen/logrus";
fetch = {
type = "git";
url = "https://github.com/sirupsen/logrus";
rev = "v1.4.1";
sha256 = "1m7ny9jkb98cxqhsp13xa5hnqh1s9f25x04q6arsala4zswsw33c";
};
}
{
goPackagePath = "github.com/stretchr/objx";
fetch = {
type = "git";
url = "https://github.com/stretchr/objx";
rev = "v0.2.0";
sha256 = "0pcdvakxgddaiwcdj73ra4da05a3q4cgwbpm2w75ycq4kzv8ij8k";
};
}
{
goPackagePath = "github.com/stretchr/testify";
fetch = {
type = "git";
url = "https://github.com/stretchr/testify";
rev = "v1.3.0";
sha256 = "0wjchp2c8xbgcbbq32w3kvblk6q6yn533g78nxl6iskq6y95lxsy";
};
}
{
goPackagePath = "golang.org/x/crypto";
fetch = {
type = "git";
url = "https://github.com/golang/crypto";
rev = "df01cb2cc480549d72034218dd98bf97671450ac";
sha256 = "1fhz0rym06j0fds0wg8xf6i9mm37m6qgdv83qxfh3nh0mgzf19qs";
};
}
{
goPackagePath = "golang.org/x/net";
fetch = {
type = "git";
url = "https://github.com/golang/net";
rev = "afa5a82059c6356159f699d81beff22a81842231";
sha256 = "1zhcr1m7n550m0lqjqg1rl2qrnghkl3a25vcm6cgpiqvk89f8lba";
};
}
{
goPackagePath = "golang.org/x/sys";
fetch = {
type = "git";
url = "https://github.com/golang/sys";
rev = "e8e3143a4f4a00f1fafef0dd82ba78223281b01b";
sha256 = "02pdj4gr658r99kqwxy6v4lb8w2h0zaiqq1jyqrsxqiyapijhw87";
};
}
{
goPackagePath = "golang.org/x/text";
fetch = {
type = "git";
url = "https://github.com/golang/text";
rev = "f4905fbd45b6790792202848439271c74074bbfd";
sha256 = "184d6ap2g92jvx0532lymdqbnx9vi9ips4dq5lnqbby0caccc6r1";
};
}
{
goPackagePath = "golang.org/x/tools";
fetch = {
type = "git";
url = "https://github.com/golang/tools";
rev = "aa740d4807891cb493e9f727901baf334b9fabce";
sha256 = "0431y2xw39dss0ldxhbmm7ip4d2cr2w9km2a5psgwan6v481kz0a";
};
}
]

View File

@ -1,6 +1,10 @@
{ pkgs }:
rec {
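# Go module sets reused by packages below (currently only the Qt bindings
# needed by protonmail-bridge).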
goModules = pkgs.recurseIntoAttrs rec {
qt = pkgs.libsForQt5.callPackage ../development/go-modules/qt { };
};
# A functional JetBrains IDE-with-plugins package set.
jetbrains = pkgs.dontRecurseIntoAttrs rec {
jetbrainsPluginsFor = variant: import ../top-level/jetbrains-plugins.nix {
@ -21,22 +25,37 @@ rec {
ideaUltimateWithPlugins = ideaUltimatePlugins.jetbrainsWithPlugins;
};
monaspace-fonts = pkgs.callPackage ../data/fonts/monaspace/default.nix { };
inherit (monaspace-fonts)
monaspace
monaspace-argon
monaspace-krypton
monaspace-neon
monaspace-radon
monaspace-xenon;
mopidy-subidy = pkgs.callPackage ../applications/audio/mopidy/subidy.nix {
python3Packages = pkgs.python3Packages // python3Packages;
};
picom-animations = pkgs.picom.overrideAttrs (_oldAttrs: {
pname = "picom-animations";
src = pkgs.fetchFromGitHub {
owner = "jonaburg";
repo = "picom";
rev = "d718c94";
sha256 = "165mc53ryyxn2ybkhikmk51ay3k18mvlsym3am3mgr8cpivmf2rm";
};
});
polybar = pkgs.polybar.overrideAttrs (oldAttrs: {
# Enables an extra button in formatting, indirectly allowing the use of
# the mouse forward and backward buttons.
patches = (oldAttrs.patches or [ ]) ++ [ ../applications/misc/polybar/9button.patch ];
});
python3Packages = pkgs.recurseIntoAttrs {
py-sonic = pkgs.python3.pkgs.callPackage ../development/python-modules/py-sonic { };
};
# The one in Nixpkgs still extracts the pre-built Debian package instead
# of building from source.
protonmailBridgePackages = pkgs.libsForQt5.callPackage ../applications/networking/protonmail-bridge {
inherit goModules;
};
protonmail-bridge = protonmailBridgePackages.protonmail-bridge;
protonmail-bridge-headless = protonmailBridgePackages.protonmail-bridge-headless;
radeon-profile-daemon = pkgs.libsForQt5.callPackage ../tools/misc/radeon-profile-daemon { };
}

View File

@ -1,56 +0,0 @@
# This file turns the pkgs/by-name directory (see its README.md for more info)
# into an overlay that adds all the defined packages.
#
# No validity checks are done here, instead this file is optimised for
# performance, and validity checks are done by CI on PRs.
#
# This file is based on Nixpkgs' `pkgs/top-level/by-name-overlay.nix` in order
# to utilise the same infrastructure and layout, with some adjustments to fit
# our derivative project.
{ lib
, pkgs
}:
# Type: Path -> Overlay
baseDirectory:
let
inherit (builtins)
readDir
;
inherit (lib.attrsets)
mapAttrs
mapAttrsToList
mergeAttrsList
;
# Package files for a single shard
# Type: String -> String -> AttrsOf Path
namesForShard = shard: type:
if type != "directory" then
# Ignore all non-directories. Technically only README.md is allowed as a file in the base directory, so we could alternatively:
# - Assume that README.md is the only file and change the condition to `shard == "README.md"` for a minor performance improvement.
# This would however cause very poor error messages if there's other files.
# - Ensure that README.md is the only file, throwing a better error message if that's not the case.
# However this would make for a poor code architecture, because one type of error would have to be duplicated in the validity checks and here.
# Additionally in either of those alternatives, we would have to duplicate the hardcoding of "README.md"
{ }
else
mapAttrs
(name: _: baseDirectory + "/${shard}/${name}/package.nix")
(readDir (baseDirectory + "/${shard}"));
# The attribute set mapping names to the package files defining them
# This is defined up here in order to allow reuse of the value (it's kind of expensive to compute)
# if the overlay has to be applied multiple times
packageFiles = mergeAttrsList (mapAttrsToList namesForShard (readDir baseDirectory));
in
# TODO: Consider optimising this using `builtins.deepSeq packageFiles`,
# which could free up the above thunks and reduce GC times.
# Currently this would be hard to measure until we have more packages
# and ideally https://github.com/NixOS/nix/pull/8895
_self: _super:
mapAttrs
(_name: file: pkgs.callPackage file { })
packageFiles

View File

@ -1,19 +0,0 @@
# Composes the packages collection.
{
# The system the packages will be built and used on.
localSystem
# Nixpkgs
, pkgs
# Nixpkgs lib
, lib ? pkgs.lib
}:
let
allPackages = import ./stage.nix {
inherit lib pkgs;
};
available = lib.filterAttrs
(_: drv: builtins.elem localSystem (drv.meta.platforms or [ ]));
in
available allPackages

View File

@ -1,24 +0,0 @@
# Composes a single bootstrapping of the package collection. The result is a set
# of all the packages for some particular platform.
{ lib
, pkgs
}:
let
# An overlay to auto-call packages in .../by-name.
autoCalledPackages =
import ./by-name-overlay.nix { inherit pkgs lib; } ../by-name;
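# Manually-specified packages from all-packages.nix, layered after the
# auto-called by-name set.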
allPackages = _self: _super:
import ./all-packages.nix { inherit pkgs; };
toFix = (lib.flip lib.composeManyExtensions) (_self: { }) [
autoCalledPackages
allPackages
];
in
# Return the complete set of packages.
lib.fix toFix

View File

@ -0,0 +1,55 @@
{ stdenv
, lib
, fetchFromGitHub
, alacritty
, fontconfig
, freetype
, libglvnd
, libxcb
}:
alacritty.overrideAttrs (oldAttrs: rec {
pname = "${oldAttrs.pname}-ligatures";
version = "0.7.2.20210209.g3ed0430";
src = fetchFromGitHub {
owner = "zenixls2";
repo = "alacritty";
fetchSubmodules = true;
rev = "3ed043046fc74f288d4c8fa7e4463dc201213500";
sha256 = "1dGk4ORzMSUQhuKSt5Yo7rOJCJ5/folwPX2tLiu0suA=";
};
cargoDeps = oldAttrs.cargoDeps.overrideAttrs (lib.const {
name = "${pname}-${version}-vendor.tar.gz";
inherit src;
outputSha256 = "pONu6caJmEKnbr7j+o9AyrYNpS4Q8OEjNZOhGTalncc=";
});
ligatureInputs = [
fontconfig
freetype
libglvnd
stdenv.cc.cc.lib
libxcb
];
preferLocalBuild = true;
buildInputs = (oldAttrs.buildInputs or [ ]) ++ ligatureInputs;
# HACK: One of the ligature libraries requires the C++ stdlib at runtime,
# and I can't work out a better way to push it to the RPATH.
postInstall = lib.optionalString (!stdenv.isDarwin) ''
patchelf \
--set-rpath ${lib.makeLibraryPath ligatureInputs}:"$(patchelf --print-rpath $out/bin/alacritty)" \
$out/bin/alacritty
'';
meta = oldAttrs.meta // {
description = "Alacritty with ligature patch applied";
homepage = "https://github.com/zenixls2/alacritty/tree/ligature";
};
})

View File

@ -2,18 +2,18 @@
, lib
, fetchurl
, makeWrapper
, jdk17
, jre
, udev
, xorg
}:
stdenv.mkDerivation rec {
pname = "atlauncher";
version = "3.4.35.2";
version = "3.4.20.2";
src = fetchurl {
url = "https://github.com/ATLauncher/ATLauncher/releases/download/v${version}/ATLauncher-${version}.jar";
hash = "sha256-CVJQGMnETW9BOn2To09/UuLrseNfovUyEFhcz/zyHOQ=";
hash = "sha256-YnCDs67BVhJ5rwY6jTbfgHKPbavtmcIMd16AWMBUDgk=";
};
dontUnpack = true;
@ -23,11 +23,10 @@ stdenv.mkDerivation rec {
installPhase = ''
mkdir -p $out/bin $out/share/java
cp $src $out/share/java/ATLauncher.jar
makeWrapper ${jdk17}/bin/java $out/bin/atlauncher \
makeWrapper ${jre}/bin/java $out/bin/atlauncher \
--prefix LD_LIBRARY_PATH : "${lib.makeLibraryPath [ xorg.libXxf86vm udev ]}" \
--add-flags "-jar $out/share/java/ATLauncher.jar" \
--add-flags "--working-dir \''${XDG_DATA_HOME:-\$HOME/.local/share}/ATLauncher" \
--add-flags "--no-launcher-update"
--add-flags "--working-dir \''${XDG_DATA_HOME:-\$HOME/.local/share}/ATLauncher"
'';
meta = {

View File

@ -15,6 +15,7 @@
, wayland
, wayland-protocols
, wlroots
, xwayland
}:
stdenv.mkDerivation rec {

View File

@ -14,7 +14,7 @@
, mp4Support ? false
, fdk-aac-encoder
, oggSupport ? false
, vorbis-tools
, vorbisTools
, opusSupport ? false
, opusTools
}:
@ -23,7 +23,7 @@ assert aacSupport -> faac.meta.available;
assert alacSupport -> libav.meta.available;
assert flacSupport -> flac.meta.available;
assert m4aSupport || mp4Support -> fdk-aac-encoder.meta.available;
assert oggSupport -> vorbis-tools.meta.available;
assert oggSupport -> vorbisTools.meta.available;
assert opusSupport -> opusTools.meta.available;
python3Packages.buildPythonApplication rec {
@ -51,7 +51,7 @@ python3Packages.buildPythonApplication rec {
(if alacSupport then libav else null)
(if aacSupport then faac else null)
(if (m4aSupport || mp4Support) then fdk-aac-encoder else null)
(if oggSupport then vorbis-tools else null)
(if oggSupport then vorbisTools else null)
(if opusSupport then opusTools else null)
];