Remove import/export scripts

This commit is contained in:
ppom 2025-02-17 12:00:00 +01:00
commit ca8656148a
5 changed files with 12 additions and 463 deletions

View file

@ -1,126 +0,0 @@
package main
import (
"bufio"
"encoding/gob"
"encoding/json"
"io"
"os"
"strings"
"time"
)
// quit writes msg (plus err when one is given) with the builtin print,
// then terminates the whole process with exit status 1.
func quit(msg string, err error) {
	switch err {
	case nil:
		print(msg)
	default:
		print(msg, err)
	}
	os.Exit(1)
}
// SF pairs a stream name with a filter name.
type SF struct{ S, F string }

// LogEntry is the gob record format of the go / reaction-v1.x database
// files. Several generations of the format coexist in one file: older
// records carry Stream/Filter and the legacy S timestamp inline, newer
// ones reference the stream/filter pair through the SF integer key
// (resolution happens in export).
type LogEntry struct {
	T time.Time // match timestamp; zero in legacy records (S is used instead)
	S int64     // legacy timestamp, unix epoch seconds
	// This is a "\x00" Joined string
	// which contains all matches on a line.
	Pattern string
	Stream, Filter string // may both be empty when the SF key is used instead
	SF   int  // integer key referencing a previously announced stream/filter pair
	Exec bool // whether actions were executed for this match
}

// JsonEntry is the JSON export format, one object per match.
type JsonEntry struct {
	Time int64 `json:"time"` // unix epoch
	Stream string `json:"stream"`
	Filter string `json:"filter"`
	Match []string `json:"match"`
	Exec bool `json:"exec"`
}
// export reads the gob-encoded database at oldpath and writes it to
// newpath as a stream of JSON objects (one JsonEntry per record).
//
// It handles the different generations of the v1 on-disk format:
// records referencing their stream/filter pair through the SF integer
// key are resolved against pairs announced by earlier records, and
// records carrying only the legacy unix-epoch S field get their
// timestamp from it. Undecodable records are skipped and counted;
// records with an unresolvable SF key are discarded and counted.
func export(oldpath, newpath string) {
	// Read DB
	file, err := os.Open(oldpath)
	if err != nil {
		quit("could not open db: ", err)
	}
	defer file.Close() // read-only; the close error carries no information
	dec := gob.NewDecoder(file)

	// Write export
	fileNew, err := os.Create(newpath)
	if err != nil {
		quit("could not create export: ", err)
	}
	defer fileNew.Close() // double Close below is harmless
	enc := json.NewEncoder(fileNew)

	malformedEntries := 0
	discardedEntries := 0
	readSF2int := make(map[int]SF)
	for {
		var entry LogEntry
		if err := dec.Decode(&entry); err != nil {
			if err == io.EOF {
				break
			}
			malformedEntries++
			continue
		}
		// Newer records only carry the SF key: resolve it via pairs seen
		// on earlier records; drop the record if the key is unknown.
		if entry.Stream == "" && entry.Filter == "" {
			sf, ok := readSF2int[entry.SF]
			if !ok {
				discardedEntries++
				continue
			}
			entry.Stream = sf.S
			entry.Filter = sf.F
		}
		if entry.SF != 0 {
			readSF2int[entry.SF] = SF{entry.Stream, entry.Filter}
		}
		// Legacy records store the timestamp as unix seconds in S.
		if entry.T.IsZero() {
			entry.T = time.Unix(entry.S, 0)
		}
		jsonEntry := JsonEntry{
			entry.T.Unix(),
			entry.Stream,
			entry.Filter,
			strings.Split(entry.Pattern, "\x00"),
			entry.Exec,
		}
		// An ignored write error would silently truncate the export.
		if err := enc.Encode(jsonEntry); err != nil {
			quit("could not write export: ", err)
		}
	}
	// Surface buffered write errors before reporting success.
	if err := fileNew.Close(); err != nil {
		quit("could not finalize export: ", err)
	}
	if discardedEntries > 0 {
		println(discardedEntries, "discarded entries")
	}
	if malformedEntries > 0 {
		println(malformedEntries, "malformed entries")
	}
}
// main interactively confirms with the user, then exports both v1
// database files to JSON in the current directory.
func main() {
	println("This export script must run in reaction's runtime directory.")
	println("This usually is /var/lib/reaction.")
	println("It will export the go / reaction-v1.x database files as JSON.")
	println("Do you want to proceed? (y/n)")
	line, err := bufio.NewReader(os.Stdin).ReadString('\n')
	switch {
	case err != nil:
		quit("fatal: could not read user input.", nil)
	case line != "y\n":
		quit("user did not type `y`, quitting.", nil)
	}
	export("./reaction-matches.db", "./reaction-matches.export.json")
	export("./reaction-flushes.db", "./reaction-flushes.export.json")
}

View file

@ -1,106 +0,0 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "bincode"
version = "1.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad"
dependencies = [
"serde",
]
[[package]]
name = "import-rust-db"
version = "0.1.0"
dependencies = [
"bincode",
"serde",
"serde_json",
]
[[package]]
name = "itoa"
version = "1.0.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b"
[[package]]
name = "memchr"
version = "2.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3"
[[package]]
name = "proc-macro2"
version = "1.0.88"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7c3a7fc5db1e57d5a779a352c8cdb57b29aa4c40cc69c3a68a7fedc815fbf2f9"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
version = "1.0.37"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af"
dependencies = [
"proc-macro2",
]
[[package]]
name = "ryu"
version = "1.0.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f"
[[package]]
name = "serde"
version = "1.0.210"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c8e3592472072e6e22e0a54d5904d9febf8508f65fb8552499a1abc7d1078c3a"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.210"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "243902eda00fad750862fc144cea25caca5e20d615af0a81bee94ca738f1df1f"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "serde_json"
version = "1.0.132"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d726bfaff4b320266d395898905d0eba0345aae23b54aee3a737e260fd46db03"
dependencies = [
"itoa",
"memchr",
"ryu",
"serde",
]
[[package]]
name = "syn"
version = "2.0.82"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "83540f837a8afc019423a8edb95b52a8effe46957ee402287f4292fae35be021"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "unicode-ident"
version = "1.0.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e91b56cd4cadaeb79bbf1a5645f6b4f8dc5bde8834ad5894a8db35fda9efa1fe"

View file

@ -1,9 +0,0 @@
[package]
name = "import-rust-db"
version = "0.1.0"
edition = "2021"
[dependencies]
bincode = "1.3.3"
serde = { version = "1.0.210", features = ["derive"]}
serde_json = "1.0.132"

View file

@ -1,146 +0,0 @@
use std::{
collections::BTreeMap,
error::Error,
fs::File,
io::{self, BufReader, BufWriter, Write},
process::exit,
};
use bincode::Options;
use serde::{Deserialize, Serialize};
use serde_json::Deserializer;
// use serde_json::deserialize_from;
const DB: &str = "./reaction-matches";
const FLUSH: &str = "./reaction-flushes";
const NORMAL: &str = ".db";
const OLD: &str = ".db.old";
const EXPORT: &str = ".export.json";
type E = Box<dyn Error>;
/// Entry point: run `lil_main` and turn any error into a message plus a
/// non-zero exit status.
fn main() {
    match lil_main() {
        Ok(()) => {}
        Err(err) => {
            println!("fatal: {err}");
            exit(1);
        }
    }
}
/// Interactive driver: ask for confirmation, move the v1 database files
/// aside as `.old`, then rebuild the v2 databases from the JSON exports.
fn lil_main() -> Result<(), E> {
    for line in [
        "Hello from Rust!",
        "You're about to reimport the previously exported database.",
        "This will move the old database files as .old",
        "When the process completes, you'll be able to run reaction-v2.",
        "If reaction-v2 runs as you wish, you'll be free to delete the .old & .export.json files",
        "Do you want to continue? (y/n)",
    ] {
        println!("{line}");
    }
    let mut answer = String::new();
    io::stdin()
        .read_line(&mut answer)
        .map_err(|err| format!("could not read user input: {err}"))?;
    if answer != "y\n" {
        return Err("user did not type `y`, exiting.".into());
    }
    // Keep the originals around as .old so the migration can be undone.
    std::fs::rename(format!("{DB}{NORMAL}"), format!("{DB}{OLD}"))?;
    std::fs::rename(format!("{FLUSH}{NORMAL}"), format!("{FLUSH}{OLD}"))?;
    import(&format!("{DB}{EXPORT}"), &format!("{DB}{NORMAL}"))?;
    import(&format!("{FLUSH}{EXPORT}"), &format!("{FLUSH}{NORMAL}"))?;
    Ok(())
}
/// Rebuild one reaction-v2 database at `write_path` from the JSON
/// export at `json_path`.
///
/// On-disk layout written, in order: the `DB_SIGNATURE` magic bytes,
/// the bincode-encoded header (id -> (stream, filter)), then one
/// bincode `ComputedLogEntry` per JSON record.
fn import(json_path: &str, write_path: &str) -> Result<(), E> {
    let reader = BufReader::new(File::open(json_path)?);
    let mut writer = BufWriter::new(File::create(write_path)?);
    let bin = bincode_options();

    // Signature writing
    writer
        .write_all(DB_SIGNATURE.as_bytes())
        .map_err(|err| format!("Failed to write to DB: {}", err))?;

    // A first pass over the JSON assigns an id to every (stream, filter)
    // pair; the database header stores the inverse (id -> pair) mapping.
    let header = collect_stream_filters(json_path)?;
    let database_header: DatabaseHeader =
        header.iter().map(|(k, v)| (v.clone(), k.clone())).collect();
    bin.serialize_into(&mut writer, &database_header)?;

    // Second pass: stream the entries through one by one.
    for json_entry in Deserializer::from_reader(reader).into_iter::<JsonEntry>() {
        let entry = ComputedLogEntry::from(json_entry?, &header)?;
        bin.serialize_into(&mut writer, &entry)?;
    }
    writer.flush()?;
    Ok(())
}
/// First pass over the JSON export: assign a dense id (0, 1, 2, ...) to
/// every distinct (stream, filter) pair, in order of first appearance.
///
/// Returns the (stream, filter) -> id map used while writing entries.
fn collect_stream_filters(json_path: &str) -> Result<WriteHeader, E> {
    let mut header = BTreeMap::new();
    let json_file = BufReader::new(File::open(json_path)?);
    let deserializer = Deserializer::from_reader(json_file);
    for json_entry in deserializer.into_iter::<JsonEntry>() {
        let json_entry = json_entry?;
        // entry() does lookup-and-insert in a single tree traversal
        // (the original did a get() followed by an insert()); the next
        // id is always the current map size, so no separate counter.
        let next_id = header.len();
        header
            .entry((json_entry.stream, json_entry.filter))
            .or_insert(next_id);
    }
    Ok(header)
}
/// One record of the JSON export produced by the go export script.
#[derive(Debug, Deserialize)]
struct JsonEntry {
    /// Match timestamp, unix epoch seconds.
    time: i64,
    stream: String,
    filter: String,
    /// All matches found on the log line.
    #[serde(rename = "match")] // `match` is a Rust keyword
    match_: Vec<String>,
    /// Whether actions were executed for this match.
    exec: bool,
}
// Pasted from main code
// NOTE(review): these definitions presumably must stay byte-compatible
// with reaction-v2's own database reader — do not change them here
// without changing them there; verify against the main codebase.
const DB_SIGNATURE: &str = "reaction-db-v01";
/// Bincode configuration shared with reaction-v2: default options plus
/// varint integer encoding.
pub type BincodeOptions = bincode::config::WithOtherIntEncoding<
    bincode::config::DefaultOptions,
    bincode::config::VarintEncoding,
>;
pub fn bincode_options() -> BincodeOptions {
    bincode::DefaultOptions::new().with_varint_encoding()
}
/// id -> (stream, filter), as serialized into the database header.
type DatabaseHeader = BTreeMap<usize, (String, String)>;
/// (stream, filter) -> id, used while writing entries.
type WriteHeader = BTreeMap<(String, String), usize>;
/// One log entry in the reaction-v2 on-disk format (bincode-encoded).
#[derive(Debug, Serialize)]
struct ComputedLogEntry {
    pub m: Vec<String>, // matches on the line
    pub f: usize,       // (stream, filter) id, resolved via the header
    pub t: i64,         // unix epoch seconds
    pub exec: bool,     // whether actions were executed
}
impl ComputedLogEntry {
    /// Convert one JSON export record into the on-disk representation,
    /// replacing its (stream, filter) pair by the id from `header`.
    /// Errors when the pair was never assigned an id.
    fn from(value: JsonEntry, header: &WriteHeader) -> Result<Self, E> {
        let key = (value.stream.clone(), value.filter.clone());
        if let Some(&f) = header.get(&key) {
            Ok(ComputedLogEntry {
                m: value.match_,
                f,
                t: value.time,
                exec: value.exec,
            })
        } else {
            Err(format!("invalid filter: {value:?}").into())
        }
    }
}

View file

@ -1,6 +1,5 @@
#!/usr/bin/env nix-shell
#!nix-shell -i python3 -p "python3.withPackages (ps: with ps; [ requests ])" -p debian-devscripts git minisign cargo-cross
import base64
#!nix-shell -i python3 -p "python3.withPackages (ps: with ps; [ requests ])" -p debian-devscripts git minisign cargo-cross rustup
import http.client
import json
import os
@ -26,7 +25,7 @@ def main():
tag = ""
try:
tag = cmd.stdout.strip().split("\n")[-1]
except:
except Exception:
pass
if tag == "":
print("could not retrieve last git tag.")
@ -38,8 +37,8 @@ def main():
sys.exit(1)
# Git push
cmd = subprocess.run(["git", "push", "--tags"])
quit_if(cmd)
# cmd = subprocess.run(["git", "push", "--tags"])
# quit_if(cmd)
cmd = subprocess.run(["rbw", "get", "minisign"], capture_output=True, text=True)
quit_if(cmd)
@ -48,21 +47,19 @@ def main():
all_files = []
architectures = [
("x86_64-unknown-linux-gnu", "amd64"),
"x86_64-unknown-linux-gnu",
# "x86_64-unknown-openbsd", # not supported by cross
("armv7-unknown-linux-gnueabihf", "arm"),
"armv7-unknown-linux-gnueabihf",
]
for archs in architectures:
go_arch = archs[1]
architecture = archs[0]
for architecture in architectures:
# Install toolchain
# cmd = subprocess.run([
# "rustup", "toolchain", "install", "stable",
# "-t", architecture,
# "--profile", "minimal"])
# quit_if(cmd)
cmd = subprocess.run([
"rustup", "toolchain", "install", "stable",
"-t", architecture,
"--profile", "minimal"])
quit_if(cmd)
# Build
cmd = subprocess.run([
@ -70,70 +67,9 @@ def main():
])
quit_if(cmd)
# Build rust import db
os.chdir("./import-rust-db")
cmd = subprocess.run([
"cross", "build", "--release", "--target", architecture
])
quit_if(cmd)
with open(f"./target/{architecture}/release/import-rust-db", "rb") as file:
rust_contents = base64.standard_b64encode(file.read())
os.chdir("..")
# Build go export db
os.chdir("./export-go-db")
cmd = subprocess.run(
["go", "build", "export-go-db.go"],
env=os.environ.update({"GOARCH": go_arch})
)
quit_if(cmd)
with open("./export-go-db", "rb") as file:
go_contents = base64.standard_b64encode(file.read())
os.chdir("..")
# Build glue script
contents = """
function main() {
set -eu -o pipefail
DIR="$(mktemp -d)"
echo "$GOBIN" | base64 -d > "$DIR/gobin"
echo "$RUBIN" | base64 -d > "$DIR/rubin"
chmod +x "$DIR/gobin" "$DIR/rubin"
"$DIR/gobin"
"$DIR/rubin"
rm "$DIR/gobin" "$DIR/rubin"
rmdir "$DIR"
}
GOBIN=""".encode() + go_contents + """
RUBIN=""".encode() + rust_contents + """
main
""".encode()
del go_contents
del rust_contents
with open(f"./target/{architecture}/release/migrate_reaction_db", "bw+") as file:
file.write(contents)
del contents
# # Build
# cmd = subprocess.run([
# "docker", "run", "-it", "--rm",
# "-e", "HOME=/tmp/",
# "-v", f"{os.getcwd()}:/tmp/code",
# "-w", "/tmp/code",
# "-u", str(os.getuid()),
# "rust",
# "make", f"reaction_{tag}-1_amd64.deb", "reaction", "ip46tables", "nft46"
# ])
# quit_if(cmd)
# File lists
binary_files = [
("reaction", architecture, f"reaction ({architecture})", "package"),
("migrate_reaction_db", architecture, f"db migration script ({architecture})", "package"),
("nft46", architecture, f"nft46 ({architecture})", "package"),
("ip46tables", architecture, f"ip46tables ({architecture})", "package"),
# (f"reaction_{tag}-1_amd64.deb", architecture, f"reaction.deb ({architecture})", "package")
@ -178,7 +114,7 @@ main
all_files.extend(sig_files)
# Copy only one time the text files, which are architecture-independent
if archs == architectures[-1]:
if architecture == architectures[-1]:
text_files = [
("reaction.bash", "", "bash completion file", "other"),
("reaction.fish", "", "fish completion file", "other"),