Rewrite import script in Rust (#320)

Yannik Sander 2021-08-17 10:55:08 +02:00 committed by GitHub
parent 42b0e87393
commit 5ad71362e5
43 changed files with 5544 additions and 174 deletions

71
.github/workflows/cron-nixpkgs.yml vendored Normal file

@@ -0,0 +1,71 @@
name: "Nixpkgs: Hourly import to Elasticsearch"
on:
schedule:
- cron: '0 * * * *'
jobs:
import-channel:
runs-on: ubuntu-latest
strategy:
fail-fast: true
matrix:
channel:
- unstable
- 21.05
- 20.09
env:
RUST_LOG: debug
FI_ES_EXISTS_STRATEGY: abort
FI_ES_URL: ${{ secrets.ELASTICSEARCH_URL }}
steps:
- name: Checking out the repository
uses: actions/checkout@v2
with:
fetch-depth: 0
- name: Installing Nix
uses: cachix/install-nix-action@v13
with:
install_url: https://nixos-nix-install-tests.cachix.org/serve/i6laym9jw3wg9mw6ncyrk6gjx4l34vvx/install
install_options: '--tarball-url-prefix https://nixos-nix-install-tests.cachix.org/serve'
extra_nix_config: |
experimental-features = nix-command flakes
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
- uses: cachix/cachix-action@v10
with:
name: nixos-search
signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}'
- name: Install unstable channel
run: |
nix-channel --add https://nixos.org/channels/nixpkgs-unstable
nix-channel --update
- name: Installing nixFlakes and jq
run: |
nix-env -iA nixpkgs.nixFlakes nixpkgs.jq
- name: Building import_scripts
run: |
nix build ./#packages.x86_64-linux.flake_info
- name: Import ${{ matrix.channel }} channel
run: |
./result/bin/flake-info --push --elastic-schema-version=$(cat ./VERSION) nixpkgs ${{ matrix.channel }}
if: github.repository == 'NixOS/nixos-search'
- name: Warmup ${{ matrix.channel }} channel
run: |
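# hit the new index a few times so Elasticsearch caches are warmed up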
curl ${{ secrets.ELASTICSEARCH_URL }}/latest-$(cat VERSION)-nixpkgs-${{ matrix.channel }}/_search | jq '.took'
curl ${{ secrets.ELASTICSEARCH_URL }}/latest-$(cat VERSION)-nixpkgs-${{ matrix.channel }}/_search | jq '.took'
curl ${{ secrets.ELASTICSEARCH_URL }}/latest-$(cat VERSION)-nixpkgs-${{ matrix.channel }}/_search | jq '.took'
if: github.repository == 'NixOS/nixos-search'


@@ -1,98 +0,0 @@
name: "Hourly import channel to Elasticsearch"
on:
schedule:
- cron: '0 * * * *'
jobs:
hourly-import-channel:
runs-on: ubuntu-latest
strategy:
fail-fast: true
matrix:
channel:
- unstable
- 21.05
- 20.09
env:
AWS_DEFAULT_REGION: us-east-1
AWS_S3_URL: s3://nix-releases/nixpkgs
steps:
- name: Checking out the repository
uses: actions/checkout@v2
with:
fetch-depth: 0
- name: Installing Nix
uses: cachix/install-nix-action@v13
- uses: cachix/cachix-action@v10
with:
name: nixos-search
signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}'
- name: Install unstable channel
run: |
nix-channel --add https://nixos.org/channels/nixpkgs-unstable
nix-channel --update
- name: Installing awscli
run: |
nix-env -iA nixpkgs.awscli2
- name: Check for latest evaluation in ${{ matrix.channel }} channel
run: |
if [ "${{ matrix.channel }}" = "unstable" ]; then
RELEASE=$(aws s3 ls --no-sign-request "$AWS_S3_URL/" | grep '/$' | cut -d' ' -f29 | sort | tail -1 | sed 's|/||')
else
RELEASE=$(aws s3 ls --no-sign-request "$AWS_S3_URL/" | grep 'nixpkgs-${{ matrix.channel }}pre' | grep '/$' | cut -d' ' -f29 | sort | tail -1 | sed 's|/||')
fi
aws s3 cp --no-sign-request "$AWS_S3_URL/$RELEASE/src-url" ./
EVAL_ID=$(cat src-url | cut -c30-)
echo "EVAL_ID=${EVAL_ID}" >> $GITHUB_ENV
- name: Download latest builds for ${{ matrix.channel }} channel (if needed)
if: steps.eval-cache.outputs.cache-hit != 'true'
run: |
mkdir -p ./eval-cache
cp ./src-url ./eval-cache/
curl -H "Content-Type: application/json" "$(cat ./eval-cache/src-url)/builds" -o ./eval-cache/builds.json
- name: Cache ${{ matrix.channel }} channel builds
id: eval-cache
uses: actions/cache@v2
with:
path: ./eval-cache
key: eval-cache-${{ env.EVAL_ID }}
- name: Installing nixFlakes (and jq)
run: |
nix-env -iA nixpkgs.nixFlakes nixpkgs.jq
echo 'experimental-features = nix-command flakes' | sudo tee -a /etc/nix/nix.conf
nix --version
cat /etc/nix/nix.conf
echo "$HOME/.nix-profile/bin" >> $GITHUB_PATH
- name: Building import_scripts
run: |
nix build ./#packages.x86_64-linux.import_scripts
- name: Import ${{ matrix.channel }} channel
run: |
cp ./eval-cache/builds.json ./eval-${{ env.EVAL_ID }}.json
./result/bin/import-channel --es-url ${{ secrets.ELASTICSEARCH_URL }} --channel ${{ matrix.channel }} -vvv
if: github.repository == 'NixOS/nixos-search'
- name: Warmup ${{ matrix.channel }} channel
run: |
curl ${{ secrets.ELASTICSEARCH_URL }}/latest-$(cat VERSION)-${{ matrix.channel }}/_search | jq '.took'
curl ${{ secrets.ELASTICSEARCH_URL }}/latest-$(cat VERSION)-${{ matrix.channel }}/_search | jq '.took'
curl ${{ secrets.ELASTICSEARCH_URL }}/latest-$(cat VERSION)-${{ matrix.channel }}/_search | jq '.took'
if: github.repository == 'NixOS/nixos-search'


@@ -1,5 +1,6 @@
name: "Build & Deploy to Netlify"
on:
pull_request:
push:
branches:
- main
@@ -50,7 +51,7 @@ jobs:
NETLIFY_SITE_ID: ${{ secrets.NETLIFY_SITE_ID }}
with:
production-branch: 'main'
production-deploy: true
production-deploy: ${{ github.event_name == 'push' }}
publish-dir: './dist'
github-token: ${{ secrets.GITHUB_TOKEN }}
deploy-message: 'Deploy from GitHub Actions'


@@ -1,57 +0,0 @@
name: "Build & Deploy to Netlify"
on:
pull_request:
jobs:
build-and-deploy:
runs-on: ubuntu-latest
steps:
- name: Checking out the repository
uses: actions/checkout@v2
with:
fetch-depth: 0
- name: Installing Nix
uses: cachix/install-nix-action@v13
- uses: cachix/cachix-action@v10
with:
name: nixos-search
- name: Install unstable channel
run: |
nix-channel --add https://nixos.org/channels/nixpkgs-unstable
nix-channel --update
- name: Installing NixFlakes
run: |
nix-env -iA nixpkgs.nixFlakes
echo 'experimental-features = nix-command flakes' | sudo tee -a /etc/nix/nix.conf
nix --version
cat /etc/nix/nix.conf
echo "$HOME/.nix-profile/bin" >> $GITHUB_PATH
- name: Building import_scripts
run: |
nix build ./#packages.x86_64-linux.import_scripts
- name: Building search.nixos.org
run: |
nix build ./#packages.x86_64-linux.frontend
mkdir ./dist
cp -RL ./result/* ./dist/
- name: Deploy to Netlify
uses: nwtgck/actions-netlify@v1.2
env:
NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }}
NETLIFY_SITE_ID: ${{ secrets.NETLIFY_SITE_ID }}
with:
production-branch: 'main'
publish-dir: './dist'
github-token: ${{ secrets.GITHUB_TOKEN }}
deploy-message: 'Deploy from GitHub Actions'
enable-pull-request-comment: true
enable-commit-comment: true
enable-commit-status: true
overwrites-pull-request-comment: false
if: github.repository == 'NixOS/nixos-search'

3
.gitignore vendored

@@ -5,6 +5,7 @@
.vscode
.node_repl_history
.npm
target/
build/Release
dist
elm-stuff/
@@ -18,4 +19,4 @@ npm-debug.log*
package-lock.json
repl-temp-*
result
src-url
src-url

VERSION

@@ -1 +1 @@
20
21

1725
flake-info/Cargo.lock generated Normal file

File diff suppressed because it is too large

30
flake-info/Cargo.toml Normal file

@@ -0,0 +1,30 @@
[package]
name = "flake-info"
version = "0.3.0"
authors = ["Yannik Sander <me@ysndr.de>"]
edition = "2018"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
clap = "^2.33"
serde = {version="1.0", features = ["derive"]}
serde_json = "1.0"
anyhow = "1.0"
thiserror = "1.0"
structopt = "0.3"
command-run = "0.13"
env_logger = "0.8"
log = "0.4"
tempfile = "3"
lazy_static = "1.4"
fancy-regex = "0.6"
tokio = { version = "*", features = ["full"] }
reqwest = { version = "0.11", features = ["json", "blocking"] }
sha2 = "0.9"
elasticsearch = {git = "https://github.com/elastic/elasticsearch-rs", features = ["rustls-tls"]}
[lib]
name = "flake_info"
path = "./src/lib.rs"

206
flake-info/README.md Normal file

@@ -0,0 +1,206 @@
# Flake Info
A tool that fetches packages and apps from nix flakes.
## Usage
```
flake-info 0.3.0
Extracts various information from a given flake
USAGE:
flake-info [FLAGS] [OPTIONS] [extra]... <SUBCOMMAND>
FLAGS:
--push Push to Elasticsearch (Configure using FI_ES_* environment variables)
-h, --help Prints help information
--json Print Elasticsearch-compatible JSON output
-V, --version Prints version information
OPTIONS:
--elastic-exists <elastic-exists>
How to react to existing indices [env: FI_ES_EXISTS_STRATEGY=] [default: abort] [possible values: Abort,
Ignore, Recreate]
--elastic-index-name <elastic-index-name> Name of the index to store results to [env: FI_ES_INDEX=]
-p, --elastic-pw <elastic-pw>
Elasticsearch password (unimplemented) [env: FI_ES_PASSWORD=]
--elastic-schema-version <elastic-schema-version>
Which schema version to associate with the operation [env: FI_ES_VERSION=]
--elastic-url <elastic-url>
Elasticsearch instance url [env: FI_ES_URL=] [default: http://localhost:9200]
-u, --elastic-user <elastic-user> Elasticsearch username (unimplemented) [env: FI_ES_USER=]
-k, --kind <kind>
Kind of data to extract (packages|options|apps|all) [default: all]
ARGS:
<extra>... Extra arguments that are passed to nix as is
SUBCOMMANDS:
flake
group
help Prints this message or the help of the given subcommand(s)
nixpkgs
```
### flake
Flakes can be imported using the flake subcommand
```
USAGE:
flake-info flake [FLAGS] <flake>
FLAGS:
--gc Whether to gc the store after info or not
-h, --help Prints help information
--temp-store Whether to use a temporary store or not. Located at /tmp/flake-info-store
-V, --version Prints version information
ARGS:
<flake> Flake identifier passed to nix to gather information about
```
The `<flake>` argument should contain a valid reference to a flake. It accepts all formats nix accepts:
> use git+<url> to check out a git repository at <url>
> use /local/absolute/path or ./relative/path to load a local source
> use gitlab:<user>/<repo> or github:<user>/<repo> as shortcuts for gitlab or github repositories
Optionally, analysis can be done in a temporary store, enabled by the `--temp-store` option.
#### Example
```
$ flake-info flake github:ngi-nix/offen
```
### nixpkgs
nixpkgs currently has to be imported in a different way; this is what the `nixpkgs` subcommand exists for.
It takes any valid git reference to the upstream [`nixos/nixpkgs`](https://github.com/nixos/nixpkgs/) repo as an argument and produces a complete output.
**This operation may take a while and produces lots of output.**
#### Example
```
$ flake-info nixpkgs nixos-21.05
```
### group
To perform a bulk import that groups multiple inputs under the same name/index, use the `group` command.
It expects a JSON file as input that contains references to flakes or nixpkgs. If those resources are on GitHub or GitLab, they can be extended with more meta information, including pinning the commit hash/ref.
The second argument is the group name, which is used to derive the index name.
#### Example
An example `targets.json` file can look like the following
```json
[
{
"type": "git",
"url": "./."
},
{
"type": "git",
"url": "github:fluffynukeit/adaspark"
},
{
"type": "github",
"owner": "ngi-nix",
"repo": "offen",
"hash": "4052febf151d60aa4352fa1960cf3ae088f600aa",
"description": "Hier könnte Ihre Werbung stehen"
}
]
```
```
$ flake-info group ./targets.json small-group
```
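For illustration, each entry in such a targets file maps onto a tagged source type when it is read in. Below is a minimal serde sketch of that shape; the type name `Target` is hypothetical and is not the crate's actual `Source` definition (extra keys such as `hash` are simply ignored here):

```rust
use serde::Deserialize;

// Mirrors the shape of targets.json above; flake-info itself uses its own
// `Source` type for this.
#[derive(Debug, Deserialize)]
#[serde(tag = "type", rename_all = "lowercase")]
enum Target {
    Git {
        url: String,
    },
    Github {
        owner: String,
        repo: String,
        #[serde(default)]
        git_ref: Option<String>,
        #[serde(default)]
        description: Option<String>,
    },
}

fn main() -> anyhow::Result<()> {
    // Read and parse the list of import targets.
    let raw = std::fs::read_to_string("./targets.json")?;
    let targets: Vec<Target> = serde_json::from_str(&raw)?;
    println!("{:#?}", targets);
    Ok(())
}
```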
### Elasticsearch
A number of flags are dedicated to pushing to Elasticsearch.
```
--elastic-exists <elastic-exists>
How to react to existing indices [env: FI_ES_EXISTS_STRATEGY=] [default: abort]
[possible values: Abort, Ignore, Recreate]
--elastic-index-name <elastic-index-name>
Name of the index to store results to [env: FI_ES_INDEX=]
-p, --elastic-pw <elastic-pw>
Elasticsearch password (unimplemented) [env: FI_ES_PASSWORD=]
--elastic-schema-version <elastic-schema-version>
Which schema version to associate with the operation [env: FI_ES_VERSION=]
--elastic-url <elastic-url>
Elasticsearch instance url [env: FI_ES_URL=] [default: http://localhost:9200]
-u, --elastic-user <elastic-user> Elasticsearch username (unimplemented) [env: FI_ES_USER=]
```
#### Example
```
$ flake-info --push \
--elastic-url http://localhost:5555 \
--elastic-index-name latest-21-21.05 \
--elastic-schema-version 21 group ./examples/ngi-nix.json ngi-nix
```
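When no explicit `--elastic-index-name` is given, the index and alias names are derived from the subcommand, schema version, group/channel name and source revision (see `push_to_elastic` in the Rust source further down in this commit). A minimal sketch of that naming scheme:

```rust
// Sketch of the automatic naming used when --elastic-index-name is absent.
fn index_and_alias(kind: &str, schema_version: usize, name: &str, hash: &str) -> (String, String) {
    let index = format!("{}-{}-{}-{}", kind, schema_version, name, hash);
    let alias = format!("latest-{}-{}-{}", schema_version, kind, name);
    (index, alias)
}

fn main() {
    // The group example above would yield something like
    // ("group-21-ngi-nix-<hash>", "latest-21-group-ngi-nix").
    let (index, alias) = index_and_alias("group", 21, "ngi-nix", "<hash>");
    println!("{} {}", index, alias);
}
```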
## Installation
### Preparations
This tool requires your system to have Nix installed!
You can install nix using this installer: https://nixos.org/guides/install-nix.html
Also, see https://nixos.wiki/wiki/Nix_Installation_Guide if your system is ✨special✨.
### Preparations (Docker)
If you do not want to install nix on your system, using Docker is an alternative.
Enter the [nixos/nix](https://hub.docker.com/u/nixos/) docker image and proceed from there.
### Setup nix flakes
Note that you also need nix flakes support.
Once you have nix installed, run the following commands:
1. ```
$ nix-shell -I nixpkgs=channel:nixos-21.05 -p nixFlakes
```
to enter a shell with the preview version of nix flakes installed.
2. ```
$ mkdir -p ~/.config/nix
$ echo "experimental-features = nix-command flakes" > .config/nix/nix.conf
```
to enable flake support
### Installation, finally
This project is defined as a flake, so you can build the tool using
```
$ nix build <project root>
# or
$ nix build github:miszkur/github-search
```
Replace `build` with `run` if you want to run the tool directly.

16
flake-info/default.nix Normal file

@@ -0,0 +1,16 @@
{ pkgs ? import <nixpkgs> { } }: with pkgs;
rustPlatform.buildRustPackage rec {
name = "flake-info";
src = ./.;
cargoSha256 = "sha256-TA1WEvmOfnxQ+rRwkIPN1t4VPrDL6pq+WnPHVu2/CPE=";
nativeBuildInputs = [ pkg-config ];
buildInputs = [ openssl openssl.dev ] ++ lib.optional pkgs.stdenv.isDarwin [libiconv darwin.apple_sdk.frameworks.Security];
checkFlags = [
"--skip elastic::tests"
"--skip nix_gc::tests"
];
}


@@ -0,0 +1,48 @@
[
{
"flake_description": "Extracting information from flakes",
"flake_resolved": {
"type": "git",
"url": "file:///Volumes/projects/Uni/courses/kth/DD2476-Search-Engines-and-Information-Retrieval-Systems/project?dir=flake-info"
},
"flake_name": "",
"flake_source": {
"type": "git",
"url": "./."
},
"package_attr_name": "flake-info",
"package_pname": "flake-info",
"package_pversion": "",
"package_platforms": [
"x86_64-linux",
"x86_64-darwin",
"i686-linux",
"aarch64-linux"
],
"package_outputs": [
"out"
],
"package_license": {}
},
{
"flake_description": "Extracting information from flakes",
"flake_resolved": {
"type": "git",
"url": "file:///Volumes/projects/Uni/courses/kth/DD2476-Search-Engines-and-Information-Retrieval-Systems/project?dir=flake-info"
},
"flake_name": "",
"flake_source": {
"type": "git",
"url": "./."
},
"app_bin": "/nix/store/4akx0is6fgh9ci2ak5sbskwzykr0xj85-flake-info/bin/flake-info",
"app_attr_name": "flake-info",
"app_platforms": [
"x86_64-linux",
"x86_64-darwin",
"i686-linux",
"aarch64-linux"
],
"app_type": "app"
}
]

flake-info/examples/adaspark-offen.json Normal file

@@ -0,0 +1,346 @@
[
{
"flake_description": "Compilers and tools for SPARK2014 Ada development",
"flake_resolved": {
"type": "github",
"owner": "fluffynukeit",
"repo": "adaspark"
},
"flake_name": "adaspark",
"flake_source": {
"type": "git",
"url": "github:fluffynukeit/adaspark"
},
"package_attr_name": "adaspark",
"package_pname": "adaspark",
"package_pversion": "",
"package_platforms": [
"x86_64-linux"
],
"package_outputs": [
"out"
],
"package_license": {}
},
{
"flake_description": "Compilers and tools for SPARK2014 Ada development",
"flake_resolved": {
"type": "github",
"owner": "fluffynukeit",
"repo": "adaspark"
},
"flake_name": "adaspark",
"flake_source": {
"type": "git",
"url": "github:fluffynukeit/adaspark"
},
"package_attr_name": "asis",
"package_pname": "ASIS",
"package_pversion": "gcc-10.1.0",
"package_platforms": [
"x86_64-linux"
],
"package_outputs": [
"out"
],
"package_license": {}
},
{
"flake_description": "Compilers and tools for SPARK2014 Ada development",
"flake_resolved": {
"type": "github",
"owner": "fluffynukeit",
"repo": "adaspark"
},
"flake_name": "adaspark",
"flake_source": {
"type": "git",
"url": "github:fluffynukeit/adaspark"
},
"package_attr_name": "aunit",
"package_pname": "AUnit",
"package_pversion": "20.2",
"package_platforms": [
"x86_64-linux"
],
"package_outputs": [
"out"
],
"package_license": {}
},
{
"flake_description": "Compilers and tools for SPARK2014 Ada development",
"flake_resolved": {
"type": "github",
"owner": "fluffynukeit",
"repo": "adaspark"
},
"flake_name": "adaspark",
"flake_source": {
"type": "git",
"url": "github:fluffynukeit/adaspark"
},
"package_attr_name": "gnat",
"package_pname": "gnat-10.2.0",
"package_pversion": "10.2.0",
"package_platforms": [
"x86_64-linux"
],
"package_outputs": [
"out",
"man",
"info"
],
"package_description": "GNU Compiler Collection, version 10.2.0 (wrapper script)",
"package_license": {
"license_long": "GNU General Public License v3.0 or later",
"license": "gpl3Plus",
"license_url": "https://spdx.org/licenses/GPL-3.0-or-later.html"
}
},
{
"flake_description": "Compilers and tools for SPARK2014 Ada development",
"flake_resolved": {
"type": "github",
"owner": "fluffynukeit",
"repo": "adaspark"
},
"flake_name": "adaspark",
"flake_source": {
"type": "git",
"url": "github:fluffynukeit/adaspark"
},
"package_attr_name": "gnat_util",
"package_pname": "gnat_util",
"package_pversion": "10.1.0",
"package_platforms": [
"x86_64-linux"
],
"package_outputs": [
"out"
],
"package_license": {}
},
{
"flake_description": "Compilers and tools for SPARK2014 Ada development",
"flake_resolved": {
"type": "github",
"owner": "fluffynukeit",
"repo": "adaspark"
},
"flake_name": "adaspark",
"flake_source": {
"type": "git",
"url": "github:fluffynukeit/adaspark"
},
"package_attr_name": "gnatcoll-core",
"package_pname": "gnatcoll-core",
"package_pversion": "20.2",
"package_platforms": [
"x86_64-linux"
],
"package_outputs": [
"out"
],
"package_license": {}
},
{
"flake_description": "Compilers and tools for SPARK2014 Ada development",
"flake_resolved": {
"type": "github",
"owner": "fluffynukeit",
"repo": "adaspark"
},
"flake_name": "adaspark",
"flake_source": {
"type": "git",
"url": "github:fluffynukeit/adaspark"
},
"package_attr_name": "gpr",
"package_pname": "gprbuild",
"package_pversion": "20.2",
"package_platforms": [
"x86_64-linux"
],
"package_outputs": [
"out"
],
"package_license": {}
},
{
"flake_description": "Compilers and tools for SPARK2014 Ada development",
"flake_resolved": {
"type": "github",
"owner": "fluffynukeit",
"repo": "adaspark"
},
"flake_name": "adaspark",
"flake_source": {
"type": "git",
"url": "github:fluffynukeit/adaspark"
},
"package_attr_name": "spark",
"package_pname": "SPARK2014",
"package_pversion": "20.2",
"package_platforms": [
"x86_64-linux"
],
"package_outputs": [
"out"
],
"package_license": {}
},
{
"flake_description": "Compilers and tools for SPARK2014 Ada development",
"flake_resolved": {
"type": "github",
"owner": "fluffynukeit",
"repo": "adaspark"
},
"flake_name": "adaspark",
"flake_source": {
"type": "git",
"url": "github:fluffynukeit/adaspark"
},
"package_attr_name": "xmlada",
"package_pname": "xmlada",
"package_pversion": "20.2",
"package_platforms": [
"x86_64-linux"
],
"package_outputs": [
"out"
],
"package_license": {}
},
{
"flake_description": "Offen, a fair web analytics tool",
"flake_resolved": {
"type": "github",
"owner": "ngi-nix",
"repo": "offen"
},
"flake_name": "offen",
"flake_source": {
"type": "github",
"owner": "ngi-nix",
"repo": "offen",
"description": "Hier könnte Ihre Werbung stehen",
"git_ref": "4052febf151d60aa4352fa1960cf3ae088f600aa"
},
"package_attr_name": "license_finder",
"package_pname": "license_finder",
"package_pversion": "",
"package_platforms": [
"x86_64-linux"
],
"package_outputs": [
"out"
],
"package_license": {}
},
{
"flake_description": "Offen, a fair web analytics tool",
"flake_resolved": {
"type": "github",
"owner": "ngi-nix",
"repo": "offen"
},
"flake_name": "offen",
"flake_source": {
"type": "github",
"owner": "ngi-nix",
"repo": "offen",
"description": "Hier könnte Ihre Werbung stehen",
"git_ref": "4052febf151d60aa4352fa1960cf3ae088f600aa"
},
"package_attr_name": "offen",
"package_pname": "offen-20210115",
"package_pversion": "20210115",
"package_platforms": [
"x86_64-linux"
],
"package_outputs": [
"out"
],
"package_license": {}
},
{
"flake_description": "Offen, a fair web analytics tool",
"flake_resolved": {
"type": "github",
"owner": "ngi-nix",
"repo": "offen"
},
"flake_name": "offen",
"flake_source": {
"type": "github",
"owner": "ngi-nix",
"repo": "offen",
"description": "Hier könnte Ihre Werbung stehen",
"git_ref": "4052febf151d60aa4352fa1960cf3ae088f600aa"
},
"package_attr_name": "offen-auditorium",
"package_pname": "offen-auditorium",
"package_pversion": "20210115",
"package_platforms": [
"x86_64-linux"
],
"package_outputs": [
"out"
],
"package_license": {}
},
{
"flake_description": "Offen, a fair web analytics tool",
"flake_resolved": {
"type": "github",
"owner": "ngi-nix",
"repo": "offen"
},
"flake_name": "offen",
"flake_source": {
"type": "github",
"owner": "ngi-nix",
"repo": "offen",
"description": "Hier könnte Ihre Werbung stehen",
"git_ref": "4052febf151d60aa4352fa1960cf3ae088f600aa"
},
"package_attr_name": "offen-script",
"package_pname": "offen-script",
"package_pversion": "20210115",
"package_platforms": [
"x86_64-linux"
],
"package_outputs": [
"out"
],
"package_license": {}
},
{
"flake_description": "Offen, a fair web analytics tool",
"flake_resolved": {
"type": "github",
"owner": "ngi-nix",
"repo": "offen"
},
"flake_name": "offen",
"flake_source": {
"type": "github",
"owner": "ngi-nix",
"repo": "offen",
"description": "Hier könnte Ihre Werbung stehen",
"git_ref": "4052febf151d60aa4352fa1960cf3ae088f600aa"
},
"package_attr_name": "offen-vault",
"package_pname": "offen-vault",
"package_pversion": "20210115",
"package_platforms": [
"x86_64-linux"
],
"package_outputs": [
"out"
],
"package_license": {}
}
]

flake-info/examples/examples.in.json Normal file

@@ -0,0 +1,13 @@
[
{
"type": "git",
"url": "github:fluffynukeit/adaspark"
},
{
"type": "github",
"owner": "ngi-nix",
"repo": "offen",
"git_ref": "4052febf151d60aa4352fa1960cf3ae088f600aa",
"description": "Hier könnte Ihre Werbung stehen"
}
]

flake-info/examples/examples.txt Normal file

@@ -0,0 +1,4 @@
github:serokell/deploy-rs
github:W95Psp/LiterateFStar
github:ngi-nix/openpgp-ca
./.


@@ -0,0 +1,46 @@
[
{
"flake_description": "LiterateFStar",
"flake_resolved": {
"type": "github",
"owner": "W95Psp",
"repo": "LiterateFStar"
},
"flake_name": "LiterateFStar",
"flake_source": {
"type": "git",
"url": "github:W95Psp/LiterateFStar"
},
"package_attr_name": "fstar",
"package_pname": "fstar-c671957efe8769b8fc421cd3e9da47b3fa57d510",
"package_pversion": "",
"package_platforms": [
"x86_64-linux",
"x86_64-darwin"
],
"package_outputs": [
"out"
],
"package_license": {}
},
{
"flake_description": "LiterateFStar",
"flake_resolved": {
"type": "github",
"owner": "W95Psp",
"repo": "LiterateFStar"
},
"flake_name": "LiterateFStar",
"flake_source": {
"type": "git",
"url": "github:W95Psp/LiterateFStar"
},
"app_bin": "/nix/store/mwwn9wzbgkdfac4ijj176akbkr9bxk5k-build",
"app_attr_name": "build",
"app_platforms": [
"x86_64-linux",
"x86_64-darwin"
],
"app_type": "derivation"
}
]


@@ -0,0 +1,50 @@
[
{
"flake_description": "OpenPGP CA is a tool for managing OpenPGP keys within an organization.",
"flake_resolved": {
"type": "github",
"owner": "ngi-nix",
"repo": "openpgp-ca"
},
"flake_name": "openpgp-ca",
"flake_source": {
"type": "git",
"url": "github:ngi-nix/openpgp-ca"
},
"package_attr_name": "openpgp-ca",
"package_pname": "openpgp-ca",
"package_pversion": "20200717",
"package_platforms": [
"x86_64-linux",
"x86_64-darwin"
],
"package_outputs": [
"out"
],
"package_description": "OpenPGP CA is a tool for managing OpenPGP keys within an organization.",
"package_license": {}
},
{
"flake_description": "OpenPGP CA is a tool for managing OpenPGP keys within an organization.",
"flake_resolved": {
"type": "github",
"owner": "ngi-nix",
"repo": "openpgp-ca"
},
"flake_name": "openpgp-ca",
"flake_source": {
"type": "git",
"url": "github:ngi-nix/openpgp-ca"
},
"package_attr_name": "openpgp-ca-docker",
"package_pname": "docker-image-openpgp-ca.tar.gz",
"package_pversion": "",
"package_platforms": [
"x86_64-linux"
],
"package_outputs": [
"out"
],
"package_license": {}
}
]


@@ -0,0 +1,50 @@
[
{
"flake_description": "A Simple multi-profile Nix-flake deploy tool.",
"flake_resolved": {
"type": "github",
"owner": "serokell",
"repo": "deploy-rs"
},
"flake_name": "deploy-rs",
"flake_source": {
"type": "git",
"url": "github:serokell/deploy-rs"
},
"package_attr_name": "deploy-rs",
"package_pname": "deploy-rs-0.1.0",
"package_pversion": "0.1.0",
"package_platforms": [
"x86_64-linux",
"x86_64-darwin",
"i686-linux",
"aarch64-linux"
],
"package_outputs": [
"out"
],
"package_license": {}
},
{
"flake_description": "A Simple multi-profile Nix-flake deploy tool.",
"flake_resolved": {
"type": "github",
"owner": "serokell",
"repo": "deploy-rs"
},
"flake_name": "deploy-rs",
"flake_source": {
"type": "git",
"url": "github:serokell/deploy-rs"
},
"app_bin": "/nix/store/lw8c19dkrr8a766qbl89nsfwbgwhp43q-deploy-rs-0.1.0/bin/deploy",
"app_attr_name": "deploy-rs",
"app_platforms": [
"x86_64-linux",
"x86_64-darwin",
"i686-linux",
"aarch64-linux"
],
"app_type": "app"
}
]

15
flake-info/examples/pull.sh Executable file

@@ -0,0 +1,15 @@
#! /usr/bin/env bash
# Run from cargo root as
# $ ./examples/pull.sh
echo "pulling examples in examples.txt"
examples=$(cat ./examples/examples.txt)
for flake in $examples; do
cargo run -- --flake "$flake" | jq > examples/"$(echo "$flake" | tr "/" "-")".json
done
echo "pulling excamples using json file"
cargo run -- --targets ./examples/examples.in.json | jq > examples/adaspark-offen.json

flake-info/src/main.rs Normal file

@@ -0,0 +1,354 @@
use anyhow::{Context, Result};
use commands::run_gc;
use flake_info::data::import::{Kind, NixOption};
use flake_info::data::{self, Export, Nixpkgs, Source};
use flake_info::elastic::{ElasticsearchError, ExistsStrategy};
use flake_info::{commands, elastic};
use log::{debug, error, info, warn};
use sha2::Digest;
use std::fs;
use std::path::{Path, PathBuf};
use structopt::{clap::ArgGroup, StructOpt};
use thiserror::Error;
#[derive(StructOpt, Debug)]
#[structopt(
name = "flake-info",
about = "Extracts various information from a given flake",
group = ArgGroup::with_name("sources").required(false)
)]
struct Args {
#[structopt(subcommand)]
command: Command,
#[structopt(
short,
long,
help = "Kind of data to extract (packages|options|apps|all)",
default_value
)]
kind: data::import::Kind,
#[structopt(flatten)]
elastic: ElasticOpts,
#[structopt(help = "Extra arguments that are passed to nix as is")]
extra: Vec<String>,
}
#[derive(StructOpt, Debug)]
enum Command {
Flake {
#[structopt(help = "Flake identifier passed to nix to gather information about")]
flake: String,
#[structopt(
long,
help = "Whether to use a temporary store or not. Located at /tmp/flake-info-store"
)]
temp_store: bool,
#[structopt(long, help = "Whether to gc the store after info or not")]
gc: bool,
},
Nixpkgs {
#[structopt(help = "Nixpkgs channel to import")]
channel: String,
},
Group {
#[structopt(help = "Points to a JSON file containing info targets")]
targets: PathBuf,
name: String,
#[structopt(
long,
help = "Whether to use a temporary store or not. Located at /tmp/flake-info-store"
)]
temp_store: bool,
#[structopt(long, help = "Whether to gc the store after info or not")]
gc: bool,
},
}
#[derive(StructOpt, Debug)]
struct ElasticOpts {
#[structopt(long = "json", help = "Print ElasticSeach Compatible JSON output")]
json: bool,
#[structopt(
long = "push",
help = "Push to Elasticsearch (Configure using FI_ES_* environment variables)",
requires("elastic-schema-version")
)]
enable: bool,
#[structopt(
long,
short = "u",
env = "FI_ES_USER",
help = "Elasticsearch username (unimplemented)"
)]
elastic_user: Option<String>,
#[structopt(
long,
short = "p",
env = "FI_ES_PASSWORD",
help = "Elasticsearch password (unimplemented)"
)]
elastic_pw: Option<String>,
#[structopt(
long,
env = "FI_ES_URL",
default_value = "http://localhost:9200",
help = "Elasticsearch instance url"
)]
elastic_url: String,
#[structopt(
long,
help = "Name of the index to store results to",
env = "FI_ES_INDEX",
required_if("enable", "true")
)]
elastic_index_name: Option<String>,
#[structopt(
long,
help = "How to react to existing indices",
possible_values = &ExistsStrategy::variants(),
case_insensitive = true,
default_value = "abort",
env = "FI_ES_EXISTS_STRATEGY"
)]
elastic_exists: ExistsStrategy,
#[structopt(
long,
help = "Which schema version to associate with the operation",
env = "FI_ES_VERSION"
)]
elastic_schema_version: Option<usize>,
#[structopt(
long,
help = "Whether to disable `latest` alias creation",
env = "FI_ES_VERSION"
)]
no_alias: bool,
}
#[tokio::main]
async fn main() -> Result<()> {
env_logger::init();
let args = Args::from_args();
let command_result = run_command(args.command, args.kind, &args.extra).await;
if let Err(error) = command_result {
match error {
FlakeInfoError::Flake(ref e)
| FlakeInfoError::Nixpkgs(ref e)
| FlakeInfoError::IO(ref e) => {
error!("{}", e);
}
FlakeInfoError::Group(ref el) => {
el.iter().for_each(|e| error!("{}", e));
}
}
return Err(error.into());
}
let (successes, ident) = command_result.unwrap();
if args.elastic.enable {
push_to_elastic(&args.elastic, &successes, ident).await?;
}
if args.elastic.json {
println!("{}", serde_json::to_string(&successes)?);
}
Ok(())
}
#[derive(Debug, Error)]
enum FlakeInfoError {
#[error("Getting flake info caused an error: {0}")]
Flake(anyhow::Error),
#[error("Getting nixpkgs info caused an error: {0}")]
Nixpkgs(anyhow::Error),
#[error("Getting group info caused one or more errors: {0:?}")]
Group(Vec<anyhow::Error>),
#[error("Couldn't perform IO: {0}")]
IO(anyhow::Error),
}
async fn run_command(
command: Command,
kind: Kind,
extra: &[String],
) -> Result<(Vec<Export>, (String, String, String)), FlakeInfoError> {
match command {
Command::Flake {
flake,
temp_store,
gc,
} => {
let source = Source::Git { url: flake };
let exports = flake_info::process_flake(&source, &kind, temp_store, extra)
.map_err(FlakeInfoError::Flake)?;
let info = flake_info::get_flake_info(source.to_flake_ref(), temp_store, extra)
.map_err(FlakeInfoError::Flake)?;
let ident = ("flake".to_owned(), info.name, info.revision);
Ok((exports, ident))
}
Command::Nixpkgs { channel } => {
let nixpkgs = Source::nixpkgs(channel)
.await
.map_err(FlakeInfoError::Nixpkgs)?;
let ident = (
"nixpkgs".to_owned(),
nixpkgs.channel.clone(),
nixpkgs.git_ref.clone(),
);
let exports = flake_info::process_nixpkgs(&Source::Nixpkgs(nixpkgs), &kind)
.map_err(FlakeInfoError::Nixpkgs)?;
Ok((exports, ident))
}
Command::Group {
targets,
temp_store,
gc,
name,
} => {
let sources = Source::read_sources_file(&targets).map_err(FlakeInfoError::IO)?;
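// Import each source: nixpkgs sources are processed directly, everything
// else is handled as a flake; pair every successful export with the
// revision (or git ref) it was produced from.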
let (exports_and_hashes, errors) = sources
.iter()
.map(|source| match source {
Source::Nixpkgs(nixpkgs) => flake_info::process_nixpkgs(source, &kind)
.map(|result| (result, nixpkgs.git_ref.to_owned())),
_ => flake_info::process_flake(source, &kind, temp_store, &extra).and_then(
|result| {
flake_info::get_flake_info(source.to_flake_ref(), temp_store, extra)
.map(|info| (result, info.revision))
},
),
})
.partition::<Vec<_>, _>(Result::is_ok);
let (exports, hashes) = exports_and_hashes
.into_iter()
.map(|result| result.unwrap())
.fold(
(Vec::new(), Vec::new()),
|(mut exports, mut hashes), (export, hash)| {
exports.extend(export);
hashes.push(hash);
(exports, hashes)
},
);
let errors = errors
.into_iter()
.map(Result::unwrap_err)
.collect::<Vec<_>>();
if !errors.is_empty() {
return Err(FlakeInfoError::Group(errors));
}
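// Fold the individual revisions into one deterministic SHA-256 digest
// that identifies this group import.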
let hash = {
let mut sha = sha2::Sha256::new();
for hash in hashes {
sha.update(hash);
}
format!("{:08x}", sha.finalize())
};
let ident = ("group".to_owned(), name, hash);
Ok((exports, ident))
}
}
}
async fn push_to_elastic(
elastic: &ElasticOpts,
successes: &[Export],
ident: (String, String, String),
) -> Result<()> {
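// An explicit index name (prefixed with the schema version) takes
// precedence; otherwise derive "{kind}-{version}-{name}-{hash}" as the
// index and "latest-{version}-{kind}-{name}" as its alias.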
let (index, alias) = elastic
.elastic_index_name
.to_owned()
.map(|ident| {
(
format!("{}-{}", elastic.elastic_schema_version.unwrap(), ident),
None,
)
})
.or_else(|| {
let (kind, name, hash) = ident;
let ident = format!(
"{}-{}-{}-{}",
kind,
elastic.elastic_schema_version.unwrap(),
&name,
hash
);
let alias = format!(
"latest-{}-{}-{}",
elastic.elastic_schema_version.unwrap(),
kind,
&name
);
warn!("Using automatic index identifier: {}", ident);
Some((ident, Some(alias)))
})
.unwrap();
info!("Pushing to elastic");
let es = elastic::Elasticsearch::new(elastic.elastic_url.as_str())?;
let config = elastic::Config {
index: &index,
exists_strategy: elastic.elastic_exists,
};
// catch error variant if abort strategy was triggered
let ensure = es.ensure_index(&config).await;
if let Err(ElasticsearchError::IndexExistsError(_)) = ensure {
// abort on abort
return Ok(());
} else {
// throw error if present
ensure?;
}
es.push_exports(&config, successes)
.await
.with_context(|| "Failed to push results to elasticsearch".to_string())?;
if let Some(alias) = alias {
if !elastic.no_alias {
es.write_alias(&config, &index, &alias)
.await
.with_context(|| "Failed to create alias".to_string())?;
} else {
warn!("Creating alias disabled")
}
}
Ok(())
}

flake-info/src/commands/flake_info.nix Normal file

@@ -0,0 +1,146 @@
{ flake }:
let
resolved = builtins.getFlake (toString flake);
nixpkgs = (import <nixpkgs> {});
lib = nixpkgs.lib;
default = drv: attr: default: if drv ? ${attr} then drv.${attr} else default;
# filter = lib.filterAttrs (key: _ : key == "apps" || key == "packages");
withSystem = fn: lib.mapAttrs (system: drvs: (fn system drvs));
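# A derivation only counts as valid if it evaluates without throwing,
# is not marked broken, and exposes a name and outputs.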
isValid = d:
let
r = builtins.tryEval (lib.isDerivation d && ! (lib.attrByPath [ "meta" "broken" ] false d) && builtins.seq d.name true && d ? outputs);
in
r.success && r.value;
all = pkgs:
let
validPkgs = lib.filterAttrs (k: v: isValid v) pkgs;
in
validPkgs;
readPackages = system: drvs: lib.mapAttrsToList (
attribute_name: drv: (
# if isValid drv then
{
attribute_name = attribute_name;
system = system;
name = drv.name;
# TODO consider using `builtins.parseDrvName`
version = default drv "version" "";
outputs = drv.outputs;
# paths = builtins.listToAttrs ( map (output: {name = output; value = drv.${output};}) drv.outputs );
}
// lib.optionalAttrs (drv ? meta && drv.meta ? description) { inherit (drv.meta) description; }
// lib.optionalAttrs (drv ? meta && drv.meta ? license) { inherit (drv.meta) license; }
# else {}
)
) (all drvs);
readApps = system: apps: lib.mapAttrsToList (
attribute_name: app: (
{
attribute_name = attribute_name;
system = system;
}
// lib.optionalAttrs (app ? outPath) { bin = app.outPath; }
// lib.optionalAttrs (app ? program) { bin = app.program; }
// lib.optionalAttrs (app ? type) { type = app.type; }
)
) apps;
readOptions = modules: isNixOS: let
declarations = module: (
lib.evalModules {
modules = (if lib.isList module then module else [ module ]) ++ [
(
{ ... }: {
_module.check = false;
nixpkgs.system = lib.mkDefault "x86_64-linux";
nixpkgs.config.allowBroken = true;
}
)
];
}
).options;
cleanUpOption = module: opt:
let
applyOnAttr = n: f: lib.optionalAttrs (lib.hasAttr n opt) { ${n} = f opt.${n}; };
# mkDeclaration = decl: rec {
# path = stripModulePathPrefixes decl;
# url = mkModuleUrl path;
# channelPath = "${channelName}/${path}";
# };
# Replace functions by the string <function>
substFunction = x:
if builtins.isAttrs x then
lib.mapAttrs (name: substFunction) x
else if builtins.isList x then
map substFunction x
else if lib.isFunction x then
"<function>"
else
x;
in
opt
// applyOnAttr "example" substFunction
// applyOnAttr "default" substFunction
// applyOnAttr "type" substFunction
// lib.optionalAttrs (!isNixOS) { flake = [ flake module ]; };
# // applyOnAttr "declarations" (map mkDeclaration)
options = lib.mapAttrs (
attr: module: let
list = lib.optionAttrSetToDocList (declarations module);
in
map (cleanUpOption attr) (lib.filter (x: !x.internal) list )
) modules;
in
lib.flatten (builtins.attrValues options);
read = reader: set: lib.flatten (lib.attrValues (withSystem reader set));
legacyPackages' = read readPackages (default resolved "legacyPackages" {});
packages' = read readPackages (default resolved "packages" {});
apps' = read readApps (default resolved "apps" {});
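# Merge the flat per-system lists into one entry per attribute name,
# collecting every system a derivation is exposed on under `platforms`.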
collectSystems = lib.lists.foldr (
drv@{ attribute_name, system, ... }: set:
let
present = default set "${attribute_name}" ({ platforms = []; } // drv);
drv' = present // {
platforms = present.platforms ++ [ system ];
};
drv'' = removeAttrs drv' [ "system" ];
in
set // {
${attribute_name} = drv'';
}
) {};
in
rec {
legacyPackages = lib.attrValues (collectSystems legacyPackages');
packages = lib.attrValues (collectSystems packages');
apps = lib.attrValues (collectSystems apps');
options = readOptions (default resolved "nixosModules" {}) false;
nixos-options = readOptions (
{
"nixos" = import "${builtins.fetchTarball { url = flake; }}/nixos/modules/module-list.nix";
}
) true;
all = packages ++ apps ++ options;
}

flake-info/src/commands/mod.rs Normal file

@@ -0,0 +1,8 @@
mod nix_flake_attrs;
mod nix_flake_info;
mod nix_gc;
mod nixpkgs_info;
pub use nix_flake_attrs::get_derivation_info;
pub use nix_flake_info::get_flake_info;
pub use nix_gc::run_gc;
pub use nixpkgs_info::{get_nixpkgs_info, get_nixpkgs_options};

flake-info/src/commands/nix_flake_attrs.rs Normal file

@@ -0,0 +1,51 @@
use crate::data::import::{FlakeEntry, Kind};
use anyhow::{Context, Result};
use command_run::{Command, LogTo};
use log::debug;
use std::fmt::Display;
use std::fs::File;
use std::io::Write;
use std::path::PathBuf;
const SCRIPT: &str = include_str!("flake_info.nix");
const ARGS: [&str; 3] = ["eval", "--json", "--no-write-lock-file"];
/// Evaluates the bundled extraction script (flake_info.nix) against the
/// provided flake and parses the resulting packages, apps and options
pub fn get_derivation_info<T: AsRef<str> + Display>(
flake_ref: T,
kind: Kind,
temp_store: bool,
extra: &[String],
) -> Result<Vec<FlakeEntry>> {
let script_dir = tempfile::tempdir()?;
let script_path = script_dir.path().join("extract.nix");
writeln!(File::create(&script_path)?, "{}", SCRIPT)?;
let mut command = Command::with_args("nix", ARGS.iter());
command.add_arg_pair("-f", script_path.as_os_str());
let command = command.add_args(["--arg", "flake", flake_ref.as_ref()].iter());
let command = command.add_arg(kind.as_ref());
if temp_store {
let temp_store_path = PathBuf::from("/tmp/flake-info-store");
if !temp_store_path.exists() {
std::fs::create_dir_all(&temp_store_path)
.with_context(|| "Couldn't create temporary store path")?;
}
command.add_arg_pair("--store", temp_store_path.canonicalize()?);
}
command.add_args(extra);
let mut command = command.enable_capture();
command.log_to = LogTo::Log;
command.log_output_on_error = true;
let parsed: Result<Vec<FlakeEntry>> = command
.run()
.with_context(|| format!("Failed to gather information about {}", flake_ref))
.and_then(|o| {
debug!("stderr: {}", o.stderr_string_lossy());
serde_json::de::from_str(&o.stdout_string_lossy())
.with_context(|| format!("Failed to analyze flake {}", flake_ref))
});
parsed
}

flake-info/src/commands/nix_flake_info.rs Normal file

@@ -0,0 +1,39 @@
use anyhow::{Context, Result};
use command_run::{Command, LogTo};
use std::fmt::Display;
use std::path::PathBuf;
use crate::data::Flake;
/// Uses `nix` to fetch the provided flake and read general information
/// about it using `nix flake info`
pub fn get_flake_info<T: AsRef<str> + Display>(
flake_ref: T,
temp_store: bool,
extra: &[String],
) -> Result<Flake> {
let args = ["flake", "info", "--json", "--no-write-lock-file"].iter();
let mut command = Command::with_args("nix", args);
let command = command.add_arg(flake_ref.as_ref());
if temp_store {
let temp_store_path = PathBuf::from("/tmp/flake-info-store");
if !temp_store_path.exists() {
std::fs::create_dir_all(&temp_store_path)
.with_context(|| "Couldn't create temporary store path")?;
}
command.add_arg_pair("--store", temp_store_path.canonicalize()?);
}
command.add_args(extra);
let mut command = command.enable_capture();
command.log_to = LogTo::Log;
command.log_output_on_error = true;
command
.run()
.with_context(|| format!("Failed to gather information about {}", flake_ref))
.and_then(|o| {
let deserialized: Result<Flake, _> =
serde_json::de::from_str(o.stdout_string_lossy().to_string().as_str());
Ok(deserialized?.resolve_name())
})
}

flake-info/src/commands/nix_gc.rs Normal file

@@ -0,0 +1,54 @@
use anyhow::{Context, Result};
use log::warn;
use std::{
path::PathBuf,
process::Command,
};
use thiserror::Error;
#[derive(Error, Debug)]
pub enum GCError {
#[error("Unexpected exit status: {0}")]
ExitStatusError(String),
}
pub fn run_gc() -> Result<()> {
let temp_store_path = PathBuf::from("/tmp/flake-info-store");
if !temp_store_path.exists() {
warn!("Temporary store path does not exist, was a temporary store used?");
return Ok(());
}
let mut command = Command::new("nix-store");
command.args(&[
"--gc",
"--store",
temp_store_path.canonicalize()?.to_str().unwrap(),
]);
dbg!(&command);
let mut child = command
.spawn()
.with_context(|| "failed to start `nix-store gc` subprocess")?;
let result = child.wait()?;
if !result.success() {
return Err(GCError::ExitStatusError(format!("Code: {}", result.code().unwrap())).into());
}
std::fs::remove_dir_all(temp_store_path).with_context(|| "failed to clean up temp dir")?;
Ok(())
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_gc() {
run_gc().unwrap();
}
}

flake-info/src/commands/nixpkgs_info.rs Normal file

@@ -0,0 +1,96 @@
use anyhow::{Context, Result};
use std::io::Write;
use std::{collections::HashMap, fmt::Display, fs::File};
use command_run::Command;
use log::{debug, error};
use crate::data::import::{NixOption, NixpkgsEntry, Package};
const NIXPKGS_SCRIPT: &str = include_str!("packages-config.nix");
const FLAKE_INFO_SCRIPT: &str = include_str!("flake_info.nix");
pub fn get_nixpkgs_info<T: AsRef<str> + Display>(nixpkgs_channel: T) -> Result<Vec<NixpkgsEntry>> {
let script_dir = tempfile::tempdir()?;
let script_path = script_dir.path().join("packages-config.nix");
writeln!(File::create(&script_path)?, "{}", NIXPKGS_SCRIPT)?;
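// List all packages via `nix-env -qa --json`, with packages-config.nix as
// the config so aliases are excluded and nested package sets are included.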
let mut command = Command::new("nix-env");
let command = command.enable_capture();
let command = command.add_args(&[
"-f",
"<nixpkgs>",
"-I",
format!("nixpkgs={}", nixpkgs_channel.as_ref()).as_str(),
"--arg",
"config",
format!("import {}", script_path.to_str().unwrap()).as_str(),
"-qa",
"--json",
]);
let parsed: Result<Vec<NixpkgsEntry>> = command
.run()
.with_context(|| {
format!(
"Failed to gather information about nixpkgs {}",
nixpkgs_channel.as_ref()
)
})
.and_then(|o| {
debug!("stderr: {}", o.stderr_string_lossy());
let attr_set: HashMap<String, Package> =
serde_json::de::from_str(&o.stdout_string_lossy())?;
Ok(attr_set
.into_iter()
.map(|(attribute, package)| NixpkgsEntry::Derivation { attribute, package })
.collect())
});
parsed
}
pub fn get_nixpkgs_options<T: AsRef<str> + Display>(
nixpkgs_channel: T,
) -> Result<Vec<NixpkgsEntry>> {
let script_dir = tempfile::tempdir()?;
let script_path = script_dir.path().join("flake_info.nix");
writeln!(File::create(&script_path)?, "{}", FLAKE_INFO_SCRIPT)?;
let mut command = Command::new("nix");
let command = command.enable_capture();
let mut command = command.add_args(&[
"eval",
"--json",
"-f",
script_path.to_str().unwrap(),
"--arg",
"flake",
nixpkgs_channel.as_ref(),
"nixos-options",
]);
// Nix might fail to evaluate some options that reference insecure packages
let mut env = HashMap::new();
env.insert("NIXPKGS_ALLOW_INSECURE".into(), "1".into());
env.insert("NIXPKGS_ALLOW_UNFREE".into(), "1".into());
command.env = env;
let parsed = command.run().with_context(|| {
format!(
"Failed to gather information about nixpkgs {}",
nixpkgs_channel.as_ref()
)
});
if let Err(ref e) = parsed {
error!("Command error: {}", e);
}
parsed.and_then(|o| {
debug!("stderr: {}", o.stderr_string_lossy());
let attr_set: Vec<NixOption> = serde_json::de::from_str(&o.stdout_string_lossy())?;
Ok(attr_set.into_iter().map(NixpkgsEntry::Option).collect())
})
}

flake-info/src/commands/packages-config.nix Normal file

@@ -0,0 +1,49 @@
{
# Ensures no aliases are in the results.
allowAliases = false;
# Also list unfree packages
allowUnfree = true;
# Enable recursion into attribute sets that nix-env normally doesn't look into
# so that we can get a more complete picture of the available packages for the
# purposes of the index.
packageOverrides = super:
let
recurseIntoAttrs = sets:
super.lib.genAttrs
(builtins.filter (set: builtins.hasAttr set super) sets)
(set: super.recurseIntoAttrs (builtins.getAttr set super));
in recurseIntoAttrs [
"roundcubePlugins"
"emscriptenfastcompPackages"
"fdbPackages"
"nodePackages_latest"
"nodePackages"
"platformioPackages"
"haskellPackages"
"haskell.compiler"
"idrisPackages"
"sconsPackages"
"gns3Packages"
"quicklispPackagesClisp"
"quicklispPackagesSBCL"
"rPackages"
"apacheHttpdPackages_2_4"
"zabbix44"
"zabbix40"
"zabbix30"
"fusePackages"
"nvidiaPackages"
"sourceHanPackages"
"atomPackages"
"emacs25Packages"
"emacs26Packages"
"emacs25.pkgs"
"emacs26.pkgs"
"emacs27.pkgs"
"steamPackages"
"ut2004Packages"
"zeroadPackages"
];
}

flake-info/src/data/export.rs Normal file

@@ -0,0 +1,369 @@
/// This module defines the unified output format as expected by Elasticsearch.
/// Additionally, we implement conversions from the two possible input formats, i.e.
/// Flakes or Nixpkgs.
use std::path::PathBuf;
use crate::data::import::NixOption;
use serde::{Deserialize, Serialize};
use super::{
import,
system::System,
utility::{AttributeQuery, Flatten, OneOrMany, Reverse},
};
type Flake = super::Flake;
#[allow(non_snake_case)]
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct License {
url: Option<String>,
fullName: String,
}
impl From<import::License> for License {
#[allow(non_snake_case)]
fn from(license: import::License) -> Self {
match license {
import::License::None { .. } => License {
url: None,
fullName: "No License Specified".to_string(),
},
import::License::Simple { license } => License {
url: None,
fullName: license,
},
import::License::Full { fullName, url, .. } => License { url, fullName },
import::License::Url { url } => License {
url: Some(url),
fullName: "No Name".into(),
},
}
}
}
// ----- Unified derivation representation
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum Derivation {
#[serde(rename = "package")]
Package {
package_attr_name: String,
package_attr_name_reverse: Reverse<String>,
package_attr_name_query: AttributeQuery,
package_attr_name_query_reverse: Reverse<AttributeQuery>,
package_attr_set: String,
package_attr_set_reverse: Reverse<String>,
package_pname: String,
package_pname_reverse: Reverse<String>,
package_pversion: String,
package_platforms: Vec<System>,
package_outputs: Vec<String>,
package_license: Vec<License>,
package_license_set: Vec<String>,
package_maintainers: Vec<Maintainer>,
package_maintainers_set: Vec<String>,
package_description: Option<String>,
package_description_reverse: Option<Reverse<String>>,
package_longDescription: Option<String>,
package_longDescription_reverse: Option<Reverse<String>>,
package_hydra: (),
package_system: String,
package_homepage: Vec<String>,
package_position: Option<String>,
},
#[serde(rename = "app")]
App {
app_attr_name: String,
app_platforms: Vec<System>,
app_type: Option<String>,
app_bin: Option<PathBuf>,
},
#[serde(rename = "option")]
Option {
option_source: Option<String>,
option_name: String,
option_name_reverse: Reverse<String>,
option_name_query: AttributeQuery,
option_name_query_reverse: Reverse<AttributeQuery>,
option_description: Option<String>,
option_description_reverse: Option<Reverse<String>>,
option_type: Option<String>,
option_default: Option<String>,
option_example: Option<String>,
option_flake: Option<(String, String)>,
},
}
// ----- Conversions
impl From<(import::FlakeEntry, super::Flake)> for Derivation {
fn from((d, f): (import::FlakeEntry, super::Flake)) -> Self {
match d {
import::FlakeEntry::Package {
attribute_name,
name,
version,
platforms,
outputs,
description,
license,
} => {
let package_attr_set: Vec<_> = attribute_name.split(".").collect();
let package_attr_set: String = (if package_attr_set.len() > 1 {
package_attr_set[0]
} else {
"No package set"
})
.into();
let package_attr_set_reverse = Reverse(package_attr_set.clone());
let package_license: Vec<License> = vec![license.into()];
let package_license_set: Vec<String> = package_license
.iter()
.map(|l| l.fullName.to_owned())
.collect();
let maintainer: Maintainer = f.into();
Derivation::Package {
package_attr_name_query: AttributeQuery::new(&attribute_name),
package_attr_name_query_reverse: Reverse(AttributeQuery::new(&attribute_name)),
package_attr_name: attribute_name.clone(),
package_attr_name_reverse: Reverse(attribute_name),
package_attr_set,
package_attr_set_reverse,
package_pname: name.clone(),
package_pname_reverse: Reverse(name),
package_pversion: version,
package_platforms: platforms,
package_outputs: outputs,
package_license,
package_license_set,
package_description: description.clone(),
package_maintainers: vec![maintainer.clone()],
package_maintainers_set: maintainer.name.map_or(vec![], |n| vec![n]),
package_description_reverse: description.map(Reverse),
package_longDescription: None,
package_longDescription_reverse: None,
package_hydra: (),
package_system: String::new(),
package_homepage: Vec::new(),
package_position: None,
}
}
import::FlakeEntry::App {
bin,
attribute_name,
platforms,
app_type,
} => Derivation::App {
app_attr_name: attribute_name,
app_platforms: platforms,
app_bin: bin,
app_type,
},
import::FlakeEntry::Option(option) => option.into(),
}
}
}
impl From<import::NixpkgsEntry> for Derivation {
fn from(entry: import::NixpkgsEntry) -> Self {
match entry {
import::NixpkgsEntry::Derivation { attribute, package } => {
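// Everything before the first dot of the attribute path (e.g.
// `python3Packages` in `python3Packages.requests`) is treated as the
// package set; single-component names get a placeholder.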
let package_attr_set: Vec<_> = attribute.split(".").collect();
let package_attr_set: String = (if package_attr_set.len() > 1 {
package_attr_set[0]
} else {
"No package set"
})
.into();
let package_attr_set_reverse = Reverse(package_attr_set.clone());
let package_license: Vec<_> = package
.meta
.license
.map(OneOrMany::into_list)
.unwrap_or_default()
.into_iter()
.map(|sos| sos.0.into())
.collect();
let package_license_set = package_license
.iter()
.map(|l: &License| l.fullName.to_owned())
.collect();
let package_maintainers = package
.meta
.maintainers
.map_or(Default::default(), Flatten::flatten);
let package_maintainers_set = package_maintainers
.iter()
.filter(|m| m.name.is_some())
.map(|m| m.name.to_owned().unwrap())
.collect();
Derivation::Package {
package_attr_name: attribute.clone(),
package_attr_name_reverse: Reverse(attribute.clone()),
package_attr_name_query: AttributeQuery::new(&attribute),
package_attr_name_query_reverse: Reverse(AttributeQuery::new(&attribute)),
package_attr_set,
package_attr_set_reverse,
package_pname: package.pname.clone(),
package_pname_reverse: Reverse(package.pname),
package_pversion: package.version,
package_platforms: package
.meta
.platforms
.map(Flatten::flatten)
.unwrap_or_default(),
package_outputs: package.meta.outputs.unwrap_or_default(),
package_license,
package_license_set,
package_maintainers,
package_maintainers_set,
package_description: package.meta.description.clone(),
package_description_reverse: package.meta.description.map(Reverse),
package_longDescription: package.meta.long_description.clone(),
package_longDescription_reverse: package.meta.long_description.map(Reverse),
package_hydra: (),
package_system: package.system,
package_homepage: package
.meta
.homepage
.map_or(Default::default(), OneOrMany::into_list),
package_position: package.meta.position,
}
}
import::NixpkgsEntry::Option(option) => option.into(),
}
}
}
impl From<import::NixOption> for Derivation {
fn from(
NixOption {
declarations,
description,
name,
option_type,
default,
example,
flake,
}: import::NixOption,
) -> Self {
Derivation::Option {
option_source: declarations.get(0).map(Clone::clone),
option_name: name.clone(),
option_name_reverse: Reverse(name.clone()),
option_description: description.clone(),
option_description_reverse: description.map(Reverse),
option_default: default.map(|v| {
v.as_str().map_or_else(
|| serde_json::to_string_pretty(&v).unwrap(),
|s| s.to_owned(),
)
}),
option_example: example.map(|v| {
v.as_str().map_or_else(
|| serde_json::to_string_pretty(&v).unwrap(),
|s| s.to_owned(),
)
}),
option_flake: flake,
option_type,
option_name_query: AttributeQuery::new(&name),
option_name_query_reverse: Reverse(AttributeQuery::new(&name)),
}
}
}
type Maintainer = import::Maintainer;
impl From<super::Flake> for Maintainer {
fn from(flake: super::Flake) -> Self {
let github = flake
.source
.and_then(|source| match source {
super::Source::Github { owner, .. } => Some(owner),
_ => None,
})
.unwrap_or_else(|| "Maintainer Unknown".to_string());
Maintainer {
github: Some(github),
email: None,
name: None,
}
}
}
// ----- output type
/// Export type that brings together derivation and optional flake info
#[derive(Debug, Clone, PartialEq, Serialize)]
pub struct Export {
#[serde(flatten)]
flake: Option<Flake>,
#[serde(flatten)]
item: Derivation,
}
impl Export {
/// Construct Export from Flake and Flake entry
pub fn flake(flake: Flake, item: import::FlakeEntry) -> Self {
Self {
flake: Some(flake.clone()),
item: Derivation::from((item, flake)),
}
}
/// Construct Export from NixpkgsEntry
pub fn nixpkgs(item: import::NixpkgsEntry) -> Self {
Self {
flake: None,
item: Derivation::from(item),
}
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_option() {
let option: NixOption = serde_json::from_str(r#"
{
"declarations":["/nix/store/s1q1238ahiks5a4g6j6qhhfb3rlmamvz-source/nixos/modules/system/boot/luksroot.nix"],
"default":"",
"description":"Commands that should be run right after we have mounted our LUKS device.\n",
"example":"oneline\ntwoline\nthreeline\n",
"internal":false,
"loc":["boot","initrd","luks","devices","<name>","postOpenCommands"],
"name":"boot.initrd.luks.devices.<name>.postOpenCommands",
"readOnly":false,"type":
"strings concatenated with \"\\n\"","visible":true
}"#).unwrap();
let option: Derivation = option.into();
println!("{}", serde_json::to_string_pretty(&option).unwrap());
}
}

flake-info/src/data/flake.rs Normal file

@@ -0,0 +1,87 @@
use std::path::PathBuf;
use serde::{Deserialize, Serialize};
use super::Source;
/// Holds general information about a flake
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct Flake {
#[serde(
rename(serialize = "flake_description"),
skip_serializing_if = "Option::is_none"
)]
pub description: Option<String>,
#[serde(rename(serialize = "flake_path"), skip_serializing)]
pub path: PathBuf,
#[serde(rename(serialize = "flake_resolved"))]
pub resolved: Repo,
#[serde(rename(serialize = "flake_name"), skip_deserializing)]
pub name: String,
pub revision: String,
#[serde(
skip_deserializing,
rename(serialize = "flake_source"),
skip_serializing_if = "Option::is_none"
)]
pub source: Option<Source>,
}
impl Flake {
pub(crate) fn resolve_name(mut self) -> Self {
self.name = match &self.resolved {
Repo::Git { .. } => Default::default(),
Repo::GitHub { repo, .. } => repo.clone(),
Repo::Gitlab { repo, .. } => repo.clone(),
};
self
}
}
/// Information about the flake origin
/// Supports (local/raw) Git, GitHub and Gitlab repos
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum Repo {
Git { url: PathBuf },
GitHub { owner: String, repo: String },
Gitlab { owner: String, repo: String },
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn gitlab_flake() {
let nix_info_out = r#"{"description":"neuropil is a secure messaging library for IoT, robotics and more.","lastModified":1616059502,"locked":{"lastModified":1616059502,"narHash":"sha256-fHB1vyjDXQq/E2/Xb6Xs3caAAc0VkUlnzu5kl/PvFW4=","owner":"pi-lar","repo":"neuropil","rev":"9e2f634ffa45da3f5feb158a12ee32e1673bfe35","type":"gitlab"},"original":{"owner":"pi-lar","repo":"neuropil","type":"gitlab"},"originalUrl":"gitlab:pi-lar/neuropil","path":"/nix/store/z4fp2fc9hca40nnvxi0116pfbrla5zgl-source","resolved":{"owner":"pi-lar","repo":"neuropil","type":"gitlab"},"resolvedUrl":"gitlab:pi-lar/neuropil","revision":"9e2f634ffa45da3f5feb158a12ee32e1673bfe35","url":"gitlab:pi-lar/neuropil/9e2f634ffa45da3f5feb158a12ee32e1673bfe35"}"#;
assert_eq!(
serde_json::de::from_str::<Flake>(nix_info_out).unwrap(),
Flake {
description: Some(
"neuropil is a secure messaging library for IoT, robotics and more.".into()
),
path: "/nix/store/z4fp2fc9hca40nnvxi0116pfbrla5zgl-source".into(),
resolved: Repo::Gitlab {
owner: "pi-lar".into(),
repo: "neuropil".into()
},
name: "".into(),
source: None,
revision: "9e2f634ffa45da3f5feb158a12ee32e1673bfe35".into()
}
);
assert_eq!(
serde_json::de::from_str::<Flake>(nix_info_out)
.unwrap()
.resolve_name()
.name,
"neuropil"
);
}
}

flake-info/src/data/import.rs Normal file

@@ -0,0 +1,321 @@
use std::fmt::{self, Display};
use std::marker::PhantomData;
use std::{path::PathBuf, str::FromStr};
use serde::de::{self, MapAccess, Visitor};
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use serde_json::Value;
use thiserror::Error;
use super::system::System;
use super::utility::{Flatten, OneOrMany};
/// Holds information about a specific derivation
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(untagged)]
pub enum FlakeEntry {
/// A package as it may be defined in a flake
///
/// Note: As flakes do not enforce any particular structure to be necessarily
/// present, the data represented is an idealization that _should_ match in
/// most cases and is open to extension.
Package {
attribute_name: String,
name: String,
version: String,
platforms: Vec<System>,
outputs: Vec<String>,
description: Option<String>,
#[serde(deserialize_with = "string_or_struct", default)]
license: License,
},
/// An "application" that can be called using nix run <..>
App {
bin: Option<PathBuf>,
attribute_name: String,
platforms: Vec<System>,
app_type: Option<String>,
},
/// An option defined in a module of a flake
Option(NixOption),
}
/// The representation of an option that is part of some module and can be used
/// in some nixos configuration
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct NixOption {
/// Location of the defining module(s)
pub declarations: Vec<String>,
pub description: Option<String>,
pub name: String,
#[serde(rename = "type")]
/// Nix generated description of the options type
pub option_type: Option<String>,
pub default: Option<Value>,
pub example: Option<Value>,
/// If defined in a flake, contains defining flake and module
pub flake: Option<(String, String)>,
}
/// Package as defined in nixpkgs
/// These packages usually have a more homogeneous structure, since it is
/// dictated by nixpkgs
/// Note: This is the parsing module that deals with nested input. A flattened,
/// unified representation can be found in [crate::data::export::Derivation]
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct Package {
pub pname: String,
pub version: String,
pub system: String,
pub meta: Meta,
}
/// The nixpkgs output lists attribute names as keys of a map.
/// Name and Package definition are combined using this struct
#[derive(Debug, Clone)]
pub enum NixpkgsEntry {
Derivation { attribute: String, package: Package },
Option(NixOption),
}
/// Most information about packages in nixpkgs is contained in the meta key
/// This struct represents a subset of that metadata
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct Meta {
#[serde(rename = "outputsToInstall")]
pub outputs: Option<Vec<String>>,
pub license: Option<OneOrMany<StringOrStruct<License>>>,
pub maintainers: Option<Flatten<Maintainer>>,
pub homepage: Option<OneOrMany<String>>,
pub platforms: Option<Flatten<System>>,
pub position: Option<String>,
pub description: Option<String>,
#[serde(rename = "longDescription")]
pub long_description: Option<String>,
}
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct Maintainer {
pub name: Option<String>,
pub github: Option<String>,
pub email: Option<String>,
}
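Because nixpkgs metadata is irregular, `Meta` absorbs several shapes of the same field through `OneOrMany` and `Flatten`. A minimal sketch, using hand-written JSON fragments as assumptions:
let single: Meta = serde_json::from_str(r#"{"license": "mit"}"#).unwrap();
let nested: Meta = serde_json::from_str(
    r#"{"license": [{"fullName": "MIT License"}], "platforms": [["x86_64-linux"]]}"#,
)
.unwrap();
assert!(single.license.is_some() && nested.platforms.is_some());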
/// The type of derivation (placed in packages.<system> or apps.<system>)
/// Used to direct the extraction script
#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)]
pub enum Kind {
App,
Package,
Option,
All,
}
impl AsRef<str> for Kind {
fn as_ref(&self) -> &str {
match self {
Kind::App => "app",
Kind::Package => "packages",
Kind::Option => "options",
Kind::All => "all",
}
}
}
impl Display for Kind {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.as_ref())
}
}
#[derive(Debug, Error)]
pub enum ParseKindError {
#[error("Failed to parse kind: {0}")]
UnknownKind(String),
}
impl FromStr for Kind {
type Err = ParseKindError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let kind = match s {
"app" => Kind::App,
"packages" => Kind::Package,
"options" => Kind::Option,
"all" => Kind::All,
_ => return Err(ParseKindError::UnknownKind(s.into())),
};
Ok(kind)
}
}
impl Default for Kind {
fn default() -> Self {
Kind::All
}
}
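`FromStr` mirrors `AsRef<str>`/`Display`, so a `Kind` round-trips cleanly through a CLI argument; for example:
assert_eq!(Kind::from_str("packages").unwrap(), Kind::Package);
assert_eq!(Kind::Package.to_string(), "packages");
assert!(Kind::from_str("modules").is_err());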
#[derive(Debug, Clone, PartialEq, Serialize)]
pub struct StringOrStruct<T>(pub T);
impl<'de, T> Deserialize<'de> for StringOrStruct<T>
where
T: Deserialize<'de> + FromStr<Err = anyhow::Error>,
{
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
Ok(StringOrStruct(string_or_struct(deserializer)?))
}
}
/// Different representations of the license attribute
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(untagged)]
pub enum License {
None {
#[serde(skip_serializing)]
license: (),
},
Simple {
license: String,
},
Full {
fullName: String,
// shortName: String,
url: Option<String>,
},
Url {
url: String,
},
}
impl Default for License {
fn default() -> Self {
License::None { license: () }
}
}
impl FromStr for License {
// This implementation of `from_str` can never fail; `anyhow::Error` is used
// only to satisfy the trait bound required by `string_or_struct`.
type Err = anyhow::Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
Ok(License::Simple {
license: s.to_string(),
})
}
}
/// Deserialization helper that parses an item using either serde or its `FromStr` impl
fn string_or_struct<'de, T, D>(deserializer: D) -> Result<T, D::Error>
where
T: Deserialize<'de> + FromStr<Err = anyhow::Error>,
D: Deserializer<'de>,
{
// This is a Visitor that forwards string types to T's `FromStr` impl and
// forwards map types to T's `Deserialize` impl. The `PhantomData` is to
// keep the compiler from complaining about T being an unused generic type
// parameter. We need T in order to know the Value type for the Visitor
// impl.
struct StringOrStruct<T>(PhantomData<fn() -> T>);
impl<'de, T> Visitor<'de> for StringOrStruct<T>
where
T: Deserialize<'de> + FromStr<Err = anyhow::Error>,
{
type Value = T;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("string or map")
}
fn visit_str<E>(self, value: &str) -> Result<T, E>
where
E: de::Error,
{
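// Infallible in practice: the only `FromStr` impl routed through this helper
// (`License`) always returns `Ok`, so the unwrap below cannot panic.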
Ok(FromStr::from_str(value).unwrap())
}
fn visit_map<M>(self, map: M) -> Result<T, M::Error>
where
M: MapAccess<'de>,
{
// `MapAccessDeserializer` is a wrapper that turns a `MapAccess`
// into a `Deserializer`, allowing it to be used as the input to T's
// `Deserialize` implementation. T then deserializes itself using
// the entries from the map visitor.
Deserialize::deserialize(de::value::MapAccessDeserializer::new(map))
}
}
deserializer.deserialize_any(StringOrStruct(PhantomData))
}
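The effect is that a field annotated with `deserialize_with = "string_or_struct"` accepts both encodings found in nixpkgs. A minimal sketch using the `License` wrapper:
let simple: StringOrStruct<License> = serde_json::from_str(r#""mit""#).unwrap();
let full: StringOrStruct<License> =
    serde_json::from_str(r#"{"fullName": "MIT License", "url": null}"#).unwrap();
assert_eq!(simple.0, License::Simple { license: "mit".into() });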
#[cfg(test)]
mod tests {
use std::collections::HashMap;
use serde_json::Value;
use super::*;
#[test]
fn test_nixpkgs_deserialize() {
let json = r#"
{
"nixpkgs-unstable._0verkill": {
"name": "0verkill-unstable-2011-01-13",
"pname": "0verkill-unstable",
"version": "2011-01-13",
"system": "x86_64-darwin",
"meta": {
"available": true,
"broken": false,
"description": "ASCII-ART bloody 2D action deathmatch-like game",
"homepage": "https://github.com/hackndev/0verkill",
"insecure": false,
"license": {
"fullName": "GNU General Public License v2.0 only",
"shortName": "gpl2Only",
"spdxId": "GPL-2.0-only",
"url": "https://spdx.org/licenses/GPL-2.0-only.html"
},
"maintainers": [
{
"email": "torres.anderson.85@protonmail.com",
"github": "AndersonTorres",
"githubId": 5954806,
"name": "Anderson Torres"
}
],
"name": "0verkill-unstable-2011-01-13",
"outputsToInstall": [
"out"
],
"platforms": [
"powerpc64-linux",
"powerpc64le-linux",
"riscv32-linux",
"riscv64-linux"
],
"position": "/nix/store/97lxf2n6zip41j5flbv6b0928mxv9za8-nixpkgs-unstable-21.03pre268853.d9c6f13e13f/nixpkgs-unstable/pkgs/games/0verkill/default.nix:34",
"unfree": false,
"unsupported": false
}
}
}
"#;
let map: HashMap<String, Package> = serde_json::from_str(json).unwrap();
let _: Vec<NixpkgsEntry> = map
.into_iter()
.map(|(attribute, package)| NixpkgsEntry::Derivation { attribute, package })
.collect();
}
}

View file

@ -0,0 +1,10 @@
mod export;
mod flake;
pub mod import;
mod source;
mod system;
mod utility;
pub use export::Export;
pub use flake::{Flake, Repo};
pub use source::{FlakeRef, Hash, Nixpkgs, Source};

View file

@ -0,0 +1,111 @@
use anyhow::{Context, Result};
use serde::{Deserialize, Serialize};
use std::{
fs::{self, File},
path::Path,
};
pub type Hash = String;
pub type FlakeRef = String;
/// Information about the flake origin
/// Supports (local/raw) Git, GitHub and Gitlab repos
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum Source {
Github {
owner: String,
repo: String,
description: Option<String>,
#[serde(rename(deserialize = "hash"))]
git_ref: Option<Hash>,
},
Gitlab {
owner: String,
repo: String,
git_ref: Option<Hash>,
},
Git {
url: String,
},
Nixpkgs(Nixpkgs),
}
impl Source {
pub fn to_flake_ref(&self) -> FlakeRef {
match self {
Source::Github {
owner,
repo,
git_ref,
..
} => format!(
"github:{}/{}{}",
owner,
repo,
git_ref
.as_ref()
.map_or("".to_string(), |f| format!("?ref={}", f))
),
Source::Gitlab {
owner,
repo,
git_ref,
} => format!(
"gitlab:{}/{}{}",
owner,
repo,
git_ref
.as_ref()
.map_or("".to_string(), |f| format!("?ref={}", f))
),
Source::Git { url } => url.to_string(),
Source::Nixpkgs(Nixpkgs { git_ref, .. }) => format!(
"https://api.github.com/repos/NixOS/nixpkgs/tarball/{}",
git_ref
),
}
}
pub fn read_sources_file(path: &Path) -> Result<Vec<Source>> {
let file = File::open(path).with_context(|| "Failed to open input file")?;
Ok(serde_json::from_reader(file)?)
}
pub async fn nixpkgs(channel: String) -> Result<Nixpkgs> {
#[derive(Deserialize, Debug)]
struct ApiResult {
commit: Commit,
}
#[derive(Deserialize, Debug)]
struct Commit {
sha: String,
}
let git_ref = reqwest::Client::builder()
.user_agent("curl") // thank you github
.build()?
.get(format!(
"https://api.github.com/repos/nixos/nixpkgs/branches/nixos-{}",
channel
))
.send()
.await?
.json::<ApiResult>()
.await?
.commit
.sha;
let nixpkgs = Nixpkgs { channel, git_ref };
Ok(nixpkgs)
}
}
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct Nixpkgs {
pub channel: String,
pub git_ref: String,
}
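As a sketch of how a `Source` turns into a flake reference (owner, repo, and ref below are assumed values), a pinned GitHub source renders its ref as a query parameter:
let src = Source::Github {
    owner: "NixOS".into(),
    repo: "nixpkgs".into(),
    description: None,
    git_ref: Some("nixos-21.05".into()),
};
assert_eq!(src.to_flake_ref(), "github:NixOS/nixpkgs?ref=nixos-21.05");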

View file

@ -0,0 +1,38 @@
use serde::{Deserialize, Serialize};
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(untagged)]
pub enum System {
Plain(String),
Detailed { cpu: Cpu, kernel: Kernel },
}
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct Cpu {
family: String,
}
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct Kernel {
name: String,
}
impl ToString for System {
fn to_string(&self) -> String {
match self {
System::Plain(system) => system.to_owned(),
System::Detailed { cpu, kernel } => format!("{}-{}", cpu.family, kernel.name),
}
}
}
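Both encodings collapse to the familiar doubled form; a small sketch:
let plain: System = serde_json::from_str(r#""x86_64-linux""#).unwrap();
let detailed: System =
    serde_json::from_str(r#"{"cpu": {"family": "x86_64"}, "kernel": {"name": "linux"}}"#).unwrap();
assert_eq!(plain.to_string(), "x86_64-linux");
assert_eq!(detailed.to_string(), "x86_64-linux");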
#[derive(Debug, PartialEq, Serialize, Deserialize)]
pub struct InstancePlatform {
system: System,
version: String,
}
#[cfg(test)]
mod tests {
use super::*;
}

View file

@ -0,0 +1,237 @@
use fancy_regex::Regex;
use lazy_static::lazy_static;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
#[derive(Debug, Clone, PartialEq)]
pub struct Reverse<T: Reversable + Serialize>(pub T);
pub trait Reversable {
fn reverse(&self) -> Self;
}
impl Reversable for String {
fn reverse(&self) -> Self {
self.chars().rev().collect::<String>()
}
}
impl<T: Reversable + Clone> Reversable for Vec<T> {
fn reverse(&self) -> Self {
self.iter().cloned().map(|item| item.reverse()).collect()
}
}
impl<T> Reversable for Reverse<T>
where
T: Reversable + Serialize,
{
fn reverse(&self) -> Self {
Reverse(self.0.reverse())
}
}
impl<T> Serialize for Reverse<T>
where
T: Reversable + Serialize,
{
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
self.0.reverse().serialize(serializer)
}
}
impl<'de, T> Deserialize<'de> for Reverse<T>
where
T: Reversable + Serialize + Deserialize<'de>,
{
fn deserialize<D>(deserializer: D) -> Result<Reverse<T>, D::Error>
where
D: Deserializer<'de>,
{
Ok(Reverse(T::deserialize(deserializer)?.reverse()))
}
}
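Serialization emits the reversed text (so Elasticsearch can run edge-ngrams against suffixes) and deserialization undoes it, making the round trip lossless. A small sketch with an assumed value:
let r: Reverse<String> = serde_json::from_str(r#""nginx""#).unwrap();
assert_eq!(r.0, "xnign");
assert_eq!(serde_json::to_string(&r).unwrap(), r#""nginx""#);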
/// A utility type that can represent the presence of either a single associated
/// value or a list of those. Adding absence can be achieved by wrapping the type
/// in an [Option]
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(untagged)]
pub enum OneOrMany<T> {
#[serde(serialize_with = "list")]
One(T),
Many(Vec<T>),
}
impl<T> OneOrMany<T> {
pub fn into_list(self) -> Vec<T> {
match self {
OneOrMany::One(one) => vec![one],
OneOrMany::Many(many) => many,
}
}
}
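A sketch of the two accepted shapes:
let one: OneOrMany<String> = serde_json::from_str(r#""out""#).unwrap();
let many: OneOrMany<String> = serde_json::from_str(r#"["out", "dev"]"#).unwrap();
assert_eq!(one.into_list(), ["out"]);
assert_eq!(many.into_list(), ["out", "dev"]);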
/// A utility type that flattens lists of lists as seen with `maintainers` and
/// `platforms` on selected packages
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(untagged)]
pub enum Flatten<T> {
#[serde(serialize_with = "list")]
Single(T),
Deep(Vec<Flatten<T>>),
}
impl<T: Clone> Flatten<T> {
pub fn flatten(self) -> Vec<T> {
match self {
Flatten::Single(s) => vec![s],
Flatten::Deep(v) => v.into_iter().flat_map(Flatten::flatten).collect(),
}
}
}
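Arbitrarily deep nesting collapses into one flat list; a sketch:
let nested: Flatten<String> = serde_json::from_str(r#"[["a", ["b"]], "c"]"#).unwrap();
assert_eq!(nested.flatten(), ["a", "b", "c"]);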
// TODO: use this or a to_list function?
/// Serialization helper that serializes single elements as a list with a single
/// item
pub fn list<T, S>(item: &T, s: S) -> Result<S::Ok, S::Error>
where
T: Serialize,
S: Serializer,
{
s.collect_seq(vec![item].iter())
}
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct AttributeQuery(Vec<String>);
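// QUERY splits an attribute name into minimal chunks ending at a `.`/`_`/`-`
// separator, at a transition from lowercase to a digit or uppercase letter, or
// at the start of a new capitalized word; e.g. "python37Packages" yields
// "python", "37", "Packages". Trailing separators stay attached to their chunk
// and are trimmed in `new` below.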
lazy_static! {
static ref QUERY: Regex =
Regex::new(".+?(?:(?<=[a-z])(?=[1-9A-Z])|(?<=[1-9A-Z])(?=[A-Z][a-z])|[._-]|$)").unwrap();
}
impl AttributeQuery {
pub fn new(attribute_name: &str) -> Self {
const SUFFIX: &[char] = &['-', '.', '_'];
let matches = QUERY
.find_iter(attribute_name)
.map(|found| found.unwrap().as_str())
.collect::<Vec<_>>();
let tokens = (0..matches.len())
.flat_map(|index| {
let (_, tokens) = matches.iter().skip(index).fold(
(String::new(), Vec::new()),
|(prev_parts, mut tokens), part| {
let token: String = prev_parts + part;
tokens.push(token.trim_end_matches(SUFFIX).to_owned());
(token, tokens)
},
);
tokens
})
.collect::<Vec<_>>();
AttributeQuery(tokens)
}
pub fn query(&self) -> &[String] {
&self.0
}
}
impl Reversable for AttributeQuery {
fn reverse(&self) -> Self {
AttributeQuery(self.query().to_owned().reverse())
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn attr_query_test() {
assert_eq!(
{
let mut q = AttributeQuery::new("services.nginx.extraConfig")
.query()
.to_owned();
q.sort();
q
},
{
let mut ex = [
"services.nginx.extraConfig",
"services.nginx.extra",
"services.nginx",
"services",
"nginx.extraConfig",
"nginx.extra",
"nginx",
"extraConfig",
"extra",
"Config",
];
ex.sort_unstable();
ex
},
);
assert_eq!(
{
let mut q = AttributeQuery::new("python37Packages.test1_name-test2")
.query()
.to_owned();
q.sort();
q
},
{
let mut ex = [
"python37Packages.test1_name-test2",
"python37Packages.test1_name-test",
"python37Packages.test1_name",
"python37Packages.test1",
"python37Packages.test",
"python37Packages",
"python37",
"python",
"37Packages.test1_name-test2",
"37Packages.test1_name-test",
"37Packages.test1_name",
"37Packages.test1",
"37Packages.test",
"37Packages",
"37",
"Packages.test1_name-test2",
"Packages.test1_name-test",
"Packages.test1_name",
"Packages.test1",
"Packages.test",
"Packages",
"test1_name-test2",
"test1_name-test",
"test1_name",
"test1",
"test",
"1_name-test2",
"1_name-test",
"1_name",
"1",
"name-test2",
"name-test",
"name",
"test2",
"test",
"2",
];
ex.sort_unstable();
ex
}
);
}
}

515
flake-info/src/elastic.rs Normal file
View file

@ -0,0 +1,515 @@
use std::{borrow::Borrow, collections::HashMap};
use clap::arg_enum;
pub use elasticsearch::http::transport::Transport;
use elasticsearch::{
    http::response::{self, Response},
    indices::{
        IndicesCreateParts, IndicesDeleteAliasParts, IndicesDeleteParts, IndicesExistsParts,
        IndicesGetAliasParts, IndicesPutAliasParts, IndicesUpdateAliasesParts,
    },
    BulkOperation, Elasticsearch as Client,
};
use lazy_static::lazy_static;
use log::{info, warn};
use serde_json::{json, Value};
use thiserror::Error;
use crate::data::Export;
lazy_static! {
static ref MAPPING: Value = json!({
"mappings": {
"properties": {
"type": {"type": "keyword"},
"flake_name": {
"type": "text",
"analyzer": "english",
},
"flake_description": {
"type": "text",
"analyzer": "english",
},
"flake_resolved": {
"type": "nested",
"properties": {
"type": {
"type": "keyword"
},
"owner": {
"type": "keyword"
},
"repo": {
"type": "keyword"
},
}
},
"flake_source": {
"type": "nested",
"properties": {
"type": {
"type": "keyword"
},
"owner": {
"type": "keyword"
},
"repo": {
"type": "keyword"
},
"desciption": {
"type": "text",
"analyzer": "english",
},
"git_ref": {
"type": "keyword"
},
"url": {
"type": "keyword"
},
}
},
"package_attr_name": {
"type": "keyword",
"fields": {"edge": {"type": "text", "analyzer": "edge"}},
},
"package_attr_name_reverse": {
"type": "keyword",
"fields": {"edge": {"type": "text", "analyzer": "edge"}},
},
"package_attr_name_query": {
"type": "keyword",
"fields": {"edge": {"type": "text", "analyzer": "edge"}},
},
"package_attr_name_query_reverse": {
"type": "keyword",
"fields": {"edge": {"type": "text", "analyzer": "edge"}},
},
"package_attr_set": {
"type": "keyword",
"fields": {"edge": {"type": "text", "analyzer": "edge"}},
},
"package_attr_set_reverse": {
"type": "keyword",
"fields": {"edge": {"type": "text", "analyzer": "edge"}},
},
"package_pname": {
"type": "keyword",
"fields": {"edge": {"type": "text", "analyzer": "edge"}},
},
"package_pname_reverse": {
"type": "keyword",
"fields": {"edge": {"type": "text", "analyzer": "edge"}},
},
"package_pversion": {
"type": "keyword"
},
"package_platforms": {
"type": "keyword"
},
"package_system": {
"type": "keyword"
},
"package_position": {
"type": "text"
},
"package_outputs": {
"type": "keyword"
},
"package_description": {
"type": "text",
"analyzer": "english",
"fields": {"edge": {"type": "text", "analyzer": "edge"}},
},
"package_description_reverse": {
"type": "text",
"analyzer": "english",
"fields": {"edge": {"type": "text", "analyzer": "edge"}},
},
"package_longDescription": {
"type": "text",
"analyzer": "english",
"fields": {"edge": {"type": "text", "analyzer": "edge"}},
},
"package_longDescription_reverse": {
"type": "text",
"analyzer": "english",
"fields": {"edge": {"type": "text", "analyzer": "edge"}},
},
"package_license": {
"type": "nested",
"properties": {
"fullName": {"type": "text"},
"url": {"type": "text"}},
},
"package_license_set": {"type": "keyword"},
"package_maintainers": {
"type": "nested",
"properties": {
"name": {"type": "text"},
"email": {"type": "text"},
"github": {"type": "text"},
},
},
"package_maintainers_set": {"type": "keyword"},
"package_homepage": {
"type": "keyword"
},
// Options fields
"option_name": {
"type": "keyword",
"fields": {"edge": {"type": "text", "analyzer": "edge"}},
},
"option_name_reverse": {
"type": "keyword",
"fields": {"edge": {"type": "text", "analyzer": "edge"}},
},
"option_name": {
"type": "keyword",
"fields": {"edge": {"type": "text", "analyzer": "edge"}},
},
"option_name_reverse": {
"type": "keyword",
"fields": {"edge": {"type": "text", "analyzer": "edge"}},
},
"option_description": {
"type": "text",
"analyzer": "english",
"fields": {"edge": {"type": "text", "analyzer": "edge"}},
},
"option_description_reverse": {
"type": "text",
"analyzer": "english",
"fields": {"edge": {"type": "text", "analyzer": "edge"}},
},
"option_type": {"type": "keyword"},
"option_default": {"type": "text"},
"option_example": {"type": "text"},
"option_source": {"type": "keyword"},
}
},
"settings": {
"analysis": {
"normalizer": {
"lowercase": {"type": "custom", "char_filter": [], "filter": ["lowercase"]}
},
"tokenizer": {
"edge": {
"type": "edge_ngram",
"min_gram": 2,
"max_gram": 50,
"token_chars": [
"letter",
"digit",
// Keep punctuation and symbols as token characters; otherwise they would have to be stripped from queries before matching.
"punctuation",
"symbol",
],
},
},
"analyzer": {
"edge": {"tokenizer": "edge", "filter": ["lowercase"]},
"lowercase": {
"type": "custom",
"tokenizer": "keyword",
"filter": ["lowercase"],
},
},
}
}
});
}
#[derive(Default)]
pub struct Elasticsearch {
client: Client,
}
#[derive(Error, Debug)]
pub enum ElasticsearchError {
#[error("Transport failed to initialize: {0}")]
TransportInitError(elasticsearch::Error),
#[error("Failed to push exports: {0}")]
PushError(elasticsearch::Error),
#[error("Push exports returned bad result: {0:?}")]
PushResponseError(response::Exception),
#[error("Failed to initialize index: {0}")]
InitIndexError(elasticsearch::Error),
#[error("Index initialization returned bad result: {0:?}")]
InitResponseError(response::Exception),
#[error("An unexpected error occurred in the Elasticsearch client: {0}")]
ClientError(elasticsearch::Error),
#[error("Failed to serialize exported data: {0}")]
SerializationError(#[from] serde_json::Error),
#[error("An index with the name \"{0}\" already exists and the (default) strategy is abort")]
IndexExistsError(String),
}
impl Elasticsearch {
pub fn new(url: &str) -> Result<Self, ElasticsearchError> {
let transport =
Transport::single_node(url).map_err(ElasticsearchError::TransportInitError)?;
let client = Client::new(transport);
Ok(Elasticsearch { client })
}
pub fn with_transport(transport: Transport) -> Self {
let client = Client::new(transport);
Elasticsearch { client }
}
pub async fn push_exports(
&self,
config: &Config<'_>,
exports: &[Export],
) -> Result<(), ElasticsearchError> {
// let exports: Result<Vec<Value>, serde_json::Error> = exports.iter().map(serde_json::to_value).collect();
// let exports = exports?;
let bodies = exports.chunks(10_000).map(|chunk| {
chunk
.iter()
.map(|e| BulkOperation::from(BulkOperation::index(e)))
});
for body in bodies {
let response = self
.client
.bulk(elasticsearch::BulkParts::Index(config.index))
.body(body.collect())
.send()
.await
.map_err(ElasticsearchError::PushError)?;
response
.exception()
.await
.map_err(ElasticsearchError::ClientError)?
.map(ElasticsearchError::PushResponseError)
.map_or(Ok(()), Err)?;
}
Ok(())
}
pub async fn ensure_index(&self, config: &Config<'_>) -> Result<(), ElasticsearchError> {
let exists = self.check_index(config).await?;
if exists {
match config.exists_strategy {
ExistsStrategy::Abort => {
warn!(
"Index \"{}\" already exists, strategy is: Abort push",
config.index
);
return Err(ElasticsearchError::IndexExistsError(
config.index.to_owned(),
));
}
ExistsStrategy::Ignore => {
warn!(
"Index \"{}\" already exists, strategy is: Ignore, proceed push",
config.index
);
return Ok(());
}
ExistsStrategy::Recreate => {
warn!(
"Index \"{}\" already exists, strategy is: Recreate index",
config.index
);
self.clear_index(config).await?;
}
}
}
let response = self
.client
.indices()
.create(IndicesCreateParts::Index(config.index))
.body(MAPPING.as_object())
.send()
.await
.map_err(ElasticsearchError::InitIndexError)?;
response
.exception()
.await
.map_err(ElasticsearchError::ClientError)?
.map(ElasticsearchError::PushResponseError)
.map_or(Ok(()), Err)?;
Ok(())
}
pub async fn check_index(&self, config: &Config<'_>) -> Result<bool, ElasticsearchError> {
let response = self
.client
.indices()
.exists(IndicesExistsParts::Index(&[config.index]))
.send()
.await
.map_err(ElasticsearchError::InitIndexError)?;
Ok(response.status_code() == 200)
}
pub async fn clear_index(&self, config: &Config<'_>) -> Result<(), ElasticsearchError> {
let response = self
.client
.indices()
.delete(IndicesDeleteParts::Index(&[config.index]))
.send()
.await
.map_err(ElasticsearchError::InitIndexError)?;
response
.exception()
.await
.map_err(ElasticsearchError::ClientError)?
.map(ElasticsearchError::PushResponseError)
.map_or(Ok(()), Err)
}
pub async fn write_alias(
&self,
config: &Config<'_>,
index: &str,
alias: &str,
) -> Result<(), ElasticsearchError> {
// delete old alias
info!("Try deletig old alias");
let response = self
.client
.indices()
.get_alias(IndicesGetAliasParts::Name(&[alias]))
.send()
.await
.map_err(ElasticsearchError::InitIndexError)?;
let indices = response
.json::<HashMap<String, Value>>()
.await
.map_err(ElasticsearchError::InitIndexError)?
.keys()
.cloned()
.collect::<Vec<String>>();
self.client
.indices()
.delete_alias(IndicesDeleteAliasParts::IndexName(
&indices.iter().map(AsRef::as_ref).collect::<Vec<_>>(),
&[alias],
))
.send()
.await
.map_err(ElasticsearchError::InitIndexError)?;
// put new alias
info!("Putting new alias");
let response = self
.client
.indices()
.put_alias(IndicesPutAliasParts::IndexName(&[index], alias))
.send()
.await
.map_err(ElasticsearchError::InitIndexError)?;
response
.exception()
.await
.map_err(ElasticsearchError::ClientError)?
.map(ElasticsearchError::PushResponseError)
.map_or(Ok(()), Err)
}
}
#[derive(Debug)]
pub struct Config<'a> {
pub index: &'a str,
pub exists_strategy: ExistsStrategy,
}
arg_enum! {
/// Different strategies to deal with existing indices
/// Abort: Cancel the push, return with an error
/// Ignore: Reuse the existing index, appending new data
/// Recreate: Drop the existing index and start with a new one
#[derive(Debug, Clone, Copy)]
pub enum ExistsStrategy {
Abort,
Ignore,
Recreate,
}
}
#[cfg(test)]
mod tests {
use std::path::Path;
use super::*;
use crate::{
data::{self, import::Kind},
process_flake,
};
#[tokio::test]
async fn test_delete() -> Result<(), Box<dyn std::error::Error>> {
let es = Elasticsearch::new("http://localhost:9200").unwrap();
let config = &Config {
index: "flakes_index",
exists_strategy: ExistsStrategy::Ignore,
};
es.ensure_index(config).await?;
es.clear_index(config).await?;
let exists = es.check_index(config).await?;
assert!(!exists);
Ok(())
}
#[tokio::test]
async fn test_init() -> Result<(), Box<dyn std::error::Error>> {
let es = Elasticsearch::new("http://localhost:9200").unwrap();
let config = &Config {
index: "flakes_index",
exists_strategy: ExistsStrategy::Recreate,
};
es.ensure_index(config).await?;
let exists = es.check_index(config).await?;
assert!(exists, "Index should exist");
Ok(())
}
#[tokio::test]
async fn test_push() -> Result<(), Box<dyn std::error::Error>> {
let sources: Vec<data::Source> =
data::Source::read_sources_file(Path::new("./examples/examples.in.json"))?;
let exports = sources
.iter()
.flat_map(|s| process_flake(s, &Kind::All, false, &[]))
.flatten()
.collect::<Vec<Export>>();
println!("{}", serde_json::to_string(&exports[1]).unwrap());
let es = Elasticsearch::new("http://localhost:9200").unwrap();
let config = &Config {
index: "flakes_index",
exists_strategy: ExistsStrategy::Recreate,
};
es.ensure_index(config).await?;
es.push_exports(config, &exports).await?;
Ok(())
}
#[tokio::test]
async fn test_abort_if_index_exists() -> Result<(), Box<dyn std::error::Error>> {
let es = Elasticsearch::new("http://localhost:9200").unwrap();
let config = &Config {
index: "flakes_index",
exists_strategy: ExistsStrategy::Abort,
};
es.ensure_index(&Config {
exists_strategy: ExistsStrategy::Ignore,
..*config
})
.await?;
assert!(matches!(
es.ensure_index(config).await,
Err(ElasticsearchError::IndexExistsError(_)),
));
es.clear_index(config).await?;
Ok(())
}
}

52
flake-info/src/lib.rs Normal file
View file

@ -0,0 +1,52 @@
#![recursion_limit = "256"]
use std::path::PathBuf;
use anyhow::Result;
use data::{import::Kind, Export, Source};
pub mod commands;
pub mod data;
pub mod elastic;
pub use commands::get_flake_info;
pub fn process_flake(
source: &Source,
kind: &data::import::Kind,
temp_store: bool,
extra: &[String],
) -> Result<Vec<Export>> {
let mut info = commands::get_flake_info(source.to_flake_ref(), temp_store, extra)?;
info.source = Some(source.clone());
let packages = commands::get_derivation_info(source.to_flake_ref(), *kind, temp_store, extra)?;
eprintln!("{:#?}", info);
eprintln!("{:#?}", packages);
let exports: Vec<Export> = packages
.into_iter()
.map(|p| Export::flake(info.clone(), p))
.collect();
Ok(exports)
}
pub fn process_nixpkgs(nixpkgs: &Source, kind: &Kind) -> Result<Vec<Export>, anyhow::Error> {
let drvs = if matches!(kind, Kind::All | Kind::Package) {
commands::get_nixpkgs_info(nixpkgs.to_flake_ref())?
} else {
Vec::new()
};
let mut options = if matches!(kind, Kind::All | Kind::Option) {
commands::get_nixpkgs_options(nixpkgs.to_flake_ref())?
} else {
Vec::new()
};
let mut all = drvs;
all.append(&mut options);
let exports = all.into_iter().map(Export::nixpkgs).collect();
Ok(exports)
}
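Pieced together, the intended flow reads the sources file, processes each source, and collects the exports; a minimal sketch (path and error handling are assumptions):
fn run() -> Result<Vec<Export>> {
    let sources = Source::read_sources_file(std::path::Path::new("examples/examples.in.json"))?;
    Ok(sources
        .iter()
        .filter_map(|source| process_flake(source, &Kind::All, false, &[]).ok())
        .flatten()
        .collect())
}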

View file

@ -21,11 +21,22 @@
packages = system:
{
import_scripts = mkPackage ./import-scripts system;
flake_info = mkPackage ./flake-info system;
frontend = mkPackage ./. system;
};
devShell = system:
nixpkgs.legacyPackages.${system}.mkShell {
inputsFrom = builtins.attrValues (packages system);
shellHook = ''
# undo import_scripts' shell hook
cd ..
'';
};
in
{
defaultPackage = forAllSystems (mkPackage ./.);
packages = forAllSystems packages;
};
{
defaultPackage = forAllSystems (mkPackage ./.);
packages = forAllSystems packages;
devShell = forAllSystems devShell;
};
}

View file

@ -3,5 +3,6 @@ final: prev:
nixos-search = {
frontend = import ./. { pkgs = prev; };
import_scripts = import ./import-scripts { pkgs = prev; };
flake_info = import ./flake-info { pkgs = prev; };
};
}

View file

@ -29,6 +29,7 @@ import Html.Attributes
import Page.Home
import Page.Options
import Page.Packages
import Page.Flakes
import Route
import Search
import Url
@ -59,6 +60,7 @@ type Page
| Home Page.Home.Model
| Packages Page.Packages.Model
| Options Page.Options.Model
| Flakes Page.Flakes.Model
init :
@ -93,6 +95,7 @@ type Msg
| HomeMsg Page.Home.Msg
| PackagesMsg Page.Packages.Msg
| OptionsMsg Page.Options.Msg
| FlakesMsg Page.Flakes.Msg
updateWith :
@ -234,6 +237,21 @@ changeRouteTo currentModel url =
|> avoidReinit
|> attemptQuery
Route.Flakes searchArgs ->
let
modelPage =
case model.page of
Flakes x ->
Just x
_ ->
Nothing
in
Page.Flakes.init searchArgs modelPage
|> updateWith Flakes FlakesMsg model
|> avoidReinit
|> attemptQuery
update : Msg -> Model -> ( Model, Cmd Msg )
update msg model =
@ -296,6 +314,9 @@ view model =
Options _ ->
"NixOS Search - Options"
Flakes _ ->
"NixOS Search - Flakes (Experimental)"
_ ->
"NixOS Search"
in
@ -362,6 +383,7 @@ viewNavigation route =
(viewNavigationItem route)
[ ( toRoute Route.Packages, "Packages" )
, ( toRoute Route.Options, "Options" )
--, ( toRoute Route.Flakes, "Flakes (Experimental)" )
]
@ -390,7 +412,8 @@ viewPage model =
Options optionsModel ->
Html.map (\m -> OptionsMsg m) <| Page.Options.view optionsModel
Flakes flakesModel ->
Html.map (\m -> FlakesMsg m) <| Page.Flakes.view flakesModel
-- SUBSCRIPTIONS

297
src/Page/Flakes.elm Normal file
View file

@ -0,0 +1,297 @@
module Page.Flakes exposing (Model, Msg, init, update, view)
import Browser.Navigation
import Html exposing (Html, a, code, div, li, pre, strong, text, ul)
import Html.Attributes exposing (class, classList, href, target)
import Html.Events exposing (onClick)
import Html.Parser
import Html.Parser.Util
import Json.Decode
import Route
import Search
-- MODEL
type alias Model =
Search.Model ResultItemSource ResultAggregations
type alias ResultItemSource =
{ name : String
, description : Maybe String
, type_ : Maybe String
, default : Maybe String
, example : Maybe String
, source : Maybe String
}
type alias ResultAggregations =
{ all : AggregationsAll
}
type alias AggregationsAll =
{ doc_count : Int
}
init : Route.SearchArgs -> Maybe Model -> ( Model, Cmd Msg )
init searchArgs model =
let
( newModel, newCmd ) =
Search.init searchArgs model
in
( newModel
, Cmd.map SearchMsg newCmd
)
-- UPDATE
type Msg
= SearchMsg (Search.Msg ResultItemSource ResultAggregations)
update :
Browser.Navigation.Key
-> Msg
-> Model
-> ( Model, Cmd Msg )
update navKey msg model =
case msg of
SearchMsg subMsg ->
let
( newModel, newCmd ) =
Search.update
Route.Options
navKey
subMsg
model
in
( newModel, Cmd.map SearchMsg newCmd )
-- VIEW
view : Model -> Html Msg
view model =
Search.view { toRoute = Route.Options, categoryName = "options" }
[ text "Search more than "
, strong [] [ text "10 000 options" ]
]
model
viewSuccess
viewBuckets
SearchMsg
viewBuckets :
Maybe String
-> Search.SearchResult ResultItemSource ResultAggregations
-> List (Html Msg)
viewBuckets _ _ =
[]
viewSuccess :
String
-> Bool
-> Maybe String
-> List (Search.ResultItem ResultItemSource)
-> Html Msg
viewSuccess channel showNixOSDetails show hits =
ul []
(List.map
(viewResultItem channel showNixOSDetails show)
hits
)
viewResultItem :
String
-> Bool
-> Maybe String
-> Search.ResultItem ResultItemSource
-> Html Msg
viewResultItem channel _ show item =
let
showHtml value =
case Html.Parser.run value of
Ok nodes ->
Html.Parser.Util.toVirtualDom nodes
Err _ ->
[]
default =
"Not given"
asPre value =
pre [] [ text value ]
asPreCode value =
div [] [ pre [] [ code [ class "code-block" ] [ text value ] ] ]
githubUrlPrefix branch =
"https://github.com/NixOS/nixpkgs/blob/" ++ branch ++ "/"
cleanPosition value =
if String.startsWith "source/" value then
String.dropLeft 7 value
else
value
asGithubLink value =
case Search.channelDetailsFromId channel of
Just channelDetails ->
a
[ href <| githubUrlPrefix channelDetails.branch ++ (value |> String.replace ":" "#L")
, target "_blank"
]
[ text value ]
Nothing ->
text <| cleanPosition value
withEmpty wrapWith maybe =
case maybe of
Nothing ->
asPre default
Just "" ->
asPre default
Just value ->
wrapWith value
wrapped wrapWith value =
case value of
"" ->
wrapWith <| "\"" ++ value ++ "\""
_ ->
wrapWith value
showDetails =
if Just item.source.name == show then
div [ Html.Attributes.map SearchMsg Search.trapClick ]
[ div [] [ text "Name" ]
, div [] [ wrapped asPreCode item.source.name ]
, div [] [ text "Description" ]
, div [] <|
(item.source.description
|> Maybe.map showHtml
|> Maybe.withDefault []
)
, div [] [ text "Default value" ]
, div [] [ withEmpty (wrapped asPreCode) item.source.default ]
, div [] [ text "Type" ]
, div [] [ withEmpty asPre item.source.type_ ]
, div [] [ text "Example" ]
, div [] [ withEmpty (wrapped asPreCode) item.source.example ]
, div [] [ text "Declared in" ]
, div [] [ withEmpty asGithubLink item.source.source ]
]
|> Just
else
Nothing
toggle =
SearchMsg (Search.ShowDetails item.source.name)
isOpen =
Just item.source.name == show
in
li
[ class "option"
, classList [ ( "opened", isOpen ) ]
, Search.elementId item.source.name
]
<|
List.filterMap identity
[ Just <|
Html.a
[ class "search-result-button"
, onClick toggle
, href ""
]
[ text item.source.name ]
, showDetails
]
-- API
makeRequest :
Search.Options
-> String
-> String
-> Int
-> Int
-> Maybe String
-> Search.Sort
-> Cmd Msg
makeRequest options channel query from size _ sort =
Search.makeRequest
(Search.makeRequestBody
(String.trim query)
from
size
sort
"option"
"option_name"
[]
[]
[]
"option_name"
[ ( "option_name", 6.0 )
, ( "option_name_query", 3.0 )
, ( "option_description", 1.0 )
]
)
channel
decodeResultItemSource
decodeResultAggregations
options
Search.QueryResponse
(Just "query-options")
|> Cmd.map SearchMsg
-- JSON
decodeResultItemSource : Json.Decode.Decoder ResultItemSource
decodeResultItemSource =
Json.Decode.map6 ResultItemSource
(Json.Decode.field "option_name" Json.Decode.string)
(Json.Decode.field "option_description" (Json.Decode.nullable Json.Decode.string))
(Json.Decode.field "option_type" (Json.Decode.nullable Json.Decode.string))
(Json.Decode.field "option_default" (Json.Decode.nullable Json.Decode.string))
(Json.Decode.field "option_example" (Json.Decode.nullable Json.Decode.string))
(Json.Decode.field "option_source" (Json.Decode.nullable Json.Decode.string))
decodeResultAggregations : Json.Decode.Decoder ResultAggregations
decodeResultAggregations =
Json.Decode.map ResultAggregations
(Json.Decode.field "all" decodeResultAggregationsAll)
decodeResultAggregationsAll : Json.Decode.Decoder AggregationsAll
decodeResultAggregationsAll =
Json.Decode.map AggregationsAll
(Json.Decode.field "doc_count" Json.Decode.int)

View file

@ -289,7 +289,7 @@ makeRequest options channel query from size _ sort =
, ( "option_description", 1.0 )
]
)
("latest-" ++ String.fromInt options.mappingSchemaVersion ++ "-" ++ channel)
channel
decodeResultItemSource
decodeResultAggregations
options

View file

@ -40,6 +40,7 @@ import Regex
import Route
import Search
import Utils
import Search exposing (channelDetailsFromId)
@ -153,6 +154,15 @@ init searchArgs model =
)
platforms: List String
platforms =
[ "x86_64-linux"
, "aarch64-linux"
, "i686-linux"
, "x86_64-darwin"
, "aarch64-darwin"
]
-- UPDATE
@ -240,11 +250,14 @@ viewBuckets bucketsAsString result =
selectedBucket.maintainers
|> viewBucket
"Platforms"
(result.aggregations.package_platforms.buckets |> sortBuckets)
(result.aggregations.package_platforms.buckets |> sortBuckets |> filterPlatformsBucket)
(createBucketsMsg .platforms (\s v -> { s | platforms = v }))
selectedBucket.platforms
filterPlatformsBucket : List {a | key : String} -> List {a | key : String}
filterPlatformsBucket = List.filter (\a -> List.member a.key platforms)
viewBucket :
String
-> List Search.AggregationsBucketItem
@ -574,7 +587,7 @@ makeRequest :
-> Search.Sort
-> Cmd Msg
makeRequest options channel query from size maybeBuckets sort =
let
let
currentBuckets =
initBuckets maybeBuckets
@ -647,7 +660,7 @@ makeRequest options channel query from size maybeBuckets sort =
, ( "package_longDescription", 1.0 )
]
)
("latest-" ++ String.fromInt options.mappingSchemaVersion ++ "-" ++ channel)
channel
decodeResultItemSource
decodeResultAggregations
options
@ -689,13 +702,23 @@ decodeResultItemSource =
|> Json.Decode.Pipeline.required "package_longDescription" (Json.Decode.nullable Json.Decode.string)
|> Json.Decode.Pipeline.required "package_license" (Json.Decode.list decodeResultPackageLicense)
|> Json.Decode.Pipeline.required "package_maintainers" (Json.Decode.list decodeResultPackageMaintainer)
|> Json.Decode.Pipeline.required "package_platforms" (Json.Decode.list Json.Decode.string)
|> Json.Decode.Pipeline.required "package_platforms" (Json.Decode.map filterPlatforms (Json.Decode.list Json.Decode.string))
|> Json.Decode.Pipeline.required "package_position" (Json.Decode.nullable Json.Decode.string)
|> Json.Decode.Pipeline.required "package_homepage" decodeHomepage
|> Json.Decode.Pipeline.required "package_system" Json.Decode.string
|> Json.Decode.Pipeline.required "package_hydra" (Json.Decode.nullable (Json.Decode.list decodeResultPackageHydra))
filterPlatforms : List String -> List String
filterPlatforms =
let
flip : (a -> b -> c) -> b -> a -> c
flip function argB argA =
function argA argB
in
List.filter (flip List.member platforms)
decodeHomepage : Json.Decode.Decoder (List String)
decodeHomepage =
Json.Decode.oneOf

View file

@ -77,6 +77,7 @@ type Route
| Home
| Packages SearchArgs
| Options SearchArgs
| Flakes SearchArgs
parser : Url.Url -> Url.Parser.Parser (Route -> msg) msg
@ -86,6 +87,7 @@ parser url =
, Url.Parser.map NotFound <| Url.Parser.s "not-found"
, Url.Parser.map Packages <| Url.Parser.s "packages" </> searchQueryParser url
, Url.Parser.map Options <| Url.Parser.s "options" </> searchQueryParser url
, Url.Parser.map Flakes <| Url.Parser.s "flakes" </> searchQueryParser url
]
@ -143,3 +145,7 @@ routeToPieces page =
Options searchArgs ->
searchArgsToUrl searchArgs
|> (\( query, raw ) -> ( [ "options" ], query, raw ))
Flakes searchArgs ->
searchArgsToUrl searchArgs
|> (\( query, raw ) -> ( [ "flakes" ], query, raw ))

View file

@ -395,14 +395,13 @@ channelDetails : Channel -> ChannelDetails
channelDetails channel =
case channel of
Unstable ->
ChannelDetails "unstable" "unstable" "nixos/trunk-combined" "nixos-unstable"
ChannelDetails "unstable" "unstable" "nixos/trunk-combined" "nixpkgs-unstable"
Release_20_09 ->
ChannelDetails "20.09" "20.09" "nixos/release-20.09" "nixos-20.09"
ChannelDetails "20.09" "20.09" "nixos/release-20.09" "nixpkgs-20.09"
Release_21_05 ->
ChannelDetails "21.05" "21.05" "nixos/release-21.05" "nixos-21.05"
ChannelDetails "21.05" "21.05" "nixos/release-21.05" "nixpkgs-21.05"
channelFromId : String -> Maybe Channel
channelFromId channel_id =
@ -457,6 +456,9 @@ toAggregations bucketsFields =
[ ( "field"
, Json.Encode.string field
)
, ( "size"
, Json.Encode.int 20
)
]
)
]
@ -477,7 +479,7 @@ toAggregations bucketsFields =
)
]
in
( "aggregations"
( "aggs"
, Json.Encode.object <|
List.append fields allFields
)
@ -1136,7 +1138,10 @@ makeRequest :
-> (RemoteData.WebData (SearchResult a b) -> Msg a b)
-> Maybe String
-> Cmd (Msg a b)
makeRequest body index decodeResultItemSource decodeResultAggregations options responseMsg tracker =
makeRequest body channel decodeResultItemSource decodeResultAggregations options responseMsg tracker =
let
branch =
channelDetailsFromId channel |> Maybe.map .branch |> Maybe.withDefault ""
index =
"latest-" ++ String.fromInt options.mappingSchemaVersion ++ "-" ++ branch
in
Http.riskyRequest
{ method = "POST"
, headers =