Initial commit

hodasemi 2024-04-09 14:30:03 +02:00
commit 1412c8b051
48 changed files with 5611 additions and 0 deletions

11
cepitodb-master/.gitignore vendored Normal file

@@ -0,0 +1,11 @@
/target
/tmp
Cargo.lock
db_test_config.json
table_names_test.json
parameters.json
*.py
*.xlsx
*.exe

3
cepitodb-master/.gitmodules vendored Normal file

@@ -0,0 +1,3 @@
[submodule "python-cepi"]
path = python-cepi
url = git@gitlab.ptspaper.de:scs/phoenix/python-cepi.git

45
cepitodb-master/.vscode/launch.json vendored Normal file

@@ -0,0 +1,45 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"type": "lldb",
"request": "launch",
"name": "Debug executable 'cepi_1_ddtrust'",
"cargo": {
"args": [
"build",
"--bin=cepi_1_ddtrust",
"--package=cepi_1_ddtrust"
],
"filter": {
"name": "cepi_1_ddtrust",
"kind": "bin"
}
},
"args": [],
"cwd": "${workspaceFolder}"
},
{
"type": "lldb",
"request": "launch",
"name": "Debug unit tests in executable 'cepi_1_ddtrust'",
"cargo": {
"args": [
"test",
"--no-run",
"--bin=cepi_1_ddtrust",
"--package=cepi_1_ddtrust"
],
"filter": {
"name": "cepi_1_ddtrust",
"kind": "bin"
}
},
"args": [],
"cwd": "${workspaceFolder}"
}
]
}

8
cepitodb-master/.vscode/settings.json vendored Normal file

@@ -0,0 +1,8 @@
{
"workbench.colorCustomizations": {
"activityBar.background": "#4E1F20",
"titleBar.activeBackground": "#6D2B2D",
"titleBar.activeForeground": "#FDFBFB"
},
"rust-analyzer.showUnlinkedFileNotification": false
}


@@ -0,0 +1,19 @@
[package]
name = "cepi_1_ddtrust"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
pyo3 = { version = "0.18.3", features = ["auto-initialize"] }
anyhow = { version = "1.0.70", features = ["backtrace"] }
mysql = "23.0.1"
serde = { version = "1.0.160", features = ["derive"] }
serde_json = "1.0.96"
chrono = { version = "0.4.24" }
rust-database = { git = "ssh://git@gitlab.ptspaper.de/scs/phoenix/rust-database" }
clap = { version = "4.3.2", features = ["derive"] }
[dev-dependencies]
test-case = "3.1.0"


@@ -0,0 +1,9 @@
# Doc
1. `git clone git@gitlab.ptspaper.de:scs/phoenix/cepitodb.git --recursive`
## On Submodule Update
2. `git submodule update --remote --merge`
3. Commit the change
4. Push


@@ -0,0 +1,7 @@
{
"user": "klaus",
"password": "A6Uv2x3X3t1Korlynpky",
"url": "pts-mariadb1-ext",
"port": 3306,
"schema": "analyse"
}


@@ -0,0 +1,89 @@
{
"uebersicht": {
"probenvorbereitung_name": "Super-User",
"bezeichnung": "Super-User",
"probeneingang_datum": "PTS-User",
"probenuntersuchung_datum": "PTS-User",
"probenvorbereitung_datum": "PTS-User",
"probenuntersuchung_name": "Super-User",
"auftrag": "PTS-User",
"probeneingang_name": "Super-User",
"firma": "Super-User",
"labornummer": "PTS-User"
},
"nasslabor": {
"gerätetyp": "Premium-User",
"labor": "Premium-User",
"ars_filtrat_einwaage": "Premium-User",
"ars_filtrat_auswaage": "Premium-User",
"ars_filtrat_masse_schale": "Premium-User",
"ars_wasser_einwaage": "Premium-User",
"ars_wasser_auswaage": "Premium-User",
"ars_wasser_masse_schale": "Premium-User",
"csb_messbereich": "Premium-User",
"csb_verdünnung": "Premium-User",
"csb_messwert": "Premium-User",
"lf_wasser": "Premium-User",
"ph_wasser": "Premium-User",
"ph_filtrat": "Premium-User",
"zerfaserungsdauer": "Premium-User",
"einwaage": "Premium-User",
"umdrehungen": "Premium-User",
"bemerkung": "Premium-User",
"bestandteil": "Premium-User",
"masse": "Premium-User",
"trocken_entfernt": "Premium-User",
"tg_1": "Premium-User",
"tg_2": "Premium-User"
},
"nasslabor_fein": {
"labor": "PTS-User",
"faser_stippen_rückstand": "Standard-User",
"faser_stippen_rückstand_art_größe": "Standard-User",
"reject_filtergewicht": "Test-User",
"reject_auswaage": "Test-User",
"papierfremde_bestandteile": "Standard-User",
"papierfremde_art_größe": "Standard-User",
"papierfremde_material": "Standard-User",
"papierfremde_zerkleinerung": "Standard-User",
"volumen_af": "Standard-User",
"grammatur_af": "PTS-User"
},
"nasslabor_grob": {
"labor": "PTS-User",
"faser_stippen_rückstand_art_größe": "Standard-User",
"faser_stippen_rückstand": "Standard-User",
"papierfremde_zerkleinerung": "Standard-User",
"papierfremde_material": "Standard-User",
"papierfremde_art_größe": "Standard-User",
"papierfremde_bestandteile": "Standard-User",
"sd_einwaage": "Standard-User",
"sd_auswaage_ts": "Standard-User",
"sd_auswaage": "Standard-User",
"sd_filterblatt": "Standard-User",
"reject_filtergewicht": "Test-User",
"reject_auswaage": "Test-User",
"volumen_ac": "Standard-User",
"grammatur_ac": "PTS-User"
},
"blattklebetest": {
"bewertung_fein_bewertung": "PTS-User",
"bewertung_fein_bemerkung": "PTS-User",
"bewertung_fein_gesamt_bewertung": "Test-User",
"bewertung_fein_gesamt_bemerkung": "PTS-User",
"bewertung_grob_bewertung": "PTS-User",
"bewertung_grob_bemerkung": "PTS-User",
"bewertung_grob_gesamt_bewertung": "Standard-User",
"bewertung_grob_gesamt_bemerkung": "PTS-User"
},
"visuelle_beurteilung": {
"bewertung_fein_bewertung": "PTS-User",
"bewertung_fein_bemerkung": "PTS-User",
"bewertung_fein_gesamt_bewertung": "Test-User",
"bewertung_fein_gesamt_bemerkung": "PTS-User",
"bewertung_grob_bewertung": "PTS-User",
"bewertung_grob_bemerkung": "PTS-User",
"bewertung_grob_gesamt_bewertung": "Standard-User",
"bewertung_grob_gesamt_bemerkung": "PTS-User"
}
}
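The role names above are attached to the imported values when the Rust importer writes them to the database, and the Node.js backend further down in this commit filters query results against them (its `convert_rights` function): a field tagged with a role is returned only for requests made with that role or a more privileged one. A minimal Rust sketch of that rule, purely as a reading aid (it is not part of the commit):

```rust
/// Numeric rank of a role, mirroring `convert_rights` in the backend
/// (0 = most privileged). Illustrative only.
fn rank(role: &str) -> Option<u8> {
    match role {
        "Super-User" => Some(0),
        "PTS-User" => Some(1),
        "Premium-User" => Some(2),
        "Standard-User" => Some(3),
        "Test-User" => Some(4),
        _ => None,
    }
}

/// A field tagged `field_role` is visible to `requester` when the tag ranks at or
/// below the requester, i.e. the backend's `rights_value >= incoming_rights` check.
fn visible(field_role: &str, requester: &str) -> bool {
    matches!((rank(field_role), rank(requester)), (Some(f), Some(r)) if f >= r)
}

fn main() {
    // "einwaage" is tagged "Premium-User" above: Premium-, PTS- and Super-User
    // requests see it, a Standard-User request does not.
    assert!(visible("Premium-User", "Super-User"));
    assert!(!visible("Premium-User", "Standard-User"));
}
```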


@@ -0,0 +1,6 @@
{
"configurations": "api_configurations",
"infos": "api_info",
"measurements": "api_measurements",
"results": "api_results"
}

File diff suppressed because it is too large


@@ -0,0 +1 @@
pub mod cepi_22_8;


@@ -0,0 +1,200 @@
use std::collections::HashMap;
use anyhow::{anyhow, Result};
use rust_database::*;
use serde::{Deserialize, Serialize};
fn str_to_bool(s: &str) -> bool {
if s == "ja" {
true
} else {
false
}
}
fn check_none_str(s: &str) -> Option<String> {
if s == "None" {
None
} else {
Some(s.to_string())
}
}
pub struct VariableNames<'a> {
value_field_name: &'a str,
rights_field_name: &'a str,
}
impl<'a> From<&'a str> for VariableNames<'a> {
fn from(value: &'a str) -> Self {
Self {
value_field_name: value,
rights_field_name: value,
}
}
}
impl<'a> From<(&'a str, &'a str)> for VariableNames<'a> {
fn from((value_field_name, rights_field_name): (&'a str, &'a str)) -> Self {
Self {
value_field_name,
rights_field_name,
}
}
}
pub trait CombineRights: Sized {
fn combine<'a>(
variable_map: &HashMap<String, String>,
rights_map: &HashMap<String, Rights>,
variable_name: impl Into<VariableNames<'a>>,
) -> Result<Self>;
}
#[derive(Serialize, Deserialize, PartialEq, Eq, Debug, Clone)]
pub struct GuardedVariable<T: Clone> {
pub name: String,
pub value: T,
pub right: Rights,
}
impl<T: Clone> GuardedVariable<T> {
pub fn new(name: &str, value: T, right: Rights) -> Self {
Self {
name: name.to_string(),
value,
right,
}
}
}
impl GuardedVariable<f32> {
pub fn as_measurement<'a>(&'a self) -> Measurement<'a> {
Measurement {
name: &self.name,
value: self.value,
rights: Some(self.right),
}
}
}
impl CombineRights for GuardedVariable<bool> {
fn combine<'a>(
variable_map: &HashMap<String, String>,
rights_map: &HashMap<String, Rights>,
variable_name: impl Into<VariableNames<'a>>,
) -> Result<Self> {
let var_name = variable_name.into();
Ok(GuardedVariable::new(
var_name.value_field_name,
str_to_bool(variable_map.get(var_name.value_field_name).ok_or(anyhow!(
"Variable {} not found in value map",
var_name.value_field_name
))?),
*rights_map.get(var_name.rights_field_name).ok_or(anyhow!(
"Variable {} not found in rights",
var_name.rights_field_name
))?,
))
}
}
impl CombineRights for GuardedVariable<Option<String>> {
fn combine<'a>(
variable_map: &HashMap<String, String>,
rights_map: &HashMap<String, Rights>,
variable_name: impl Into<VariableNames<'a>>,
) -> Result<Self> {
let var_name = variable_name.into();
Ok(GuardedVariable::new(
var_name.value_field_name,
check_none_str(variable_map.get(var_name.value_field_name).ok_or(anyhow!(
"Variable {} not found in value map",
var_name.value_field_name
))?),
*rights_map.get(var_name.rights_field_name).ok_or(anyhow!(
"Variable {} not found in rights",
var_name.rights_field_name
))?,
))
}
}
impl CombineRights for GuardedVariable<String> {
fn combine<'a>(
variable_map: &HashMap<String, String>,
rights_map: &HashMap<String, Rights>,
variable_name: impl Into<VariableNames<'a>>,
) -> Result<Self> {
let var_name = variable_name.into();
Ok(GuardedVariable::new(
var_name.value_field_name,
variable_map
.get(var_name.value_field_name)
.ok_or(anyhow!(
"Variable {} not found in value map",
var_name.value_field_name,
))?
.clone(),
*rights_map.get(var_name.rights_field_name).ok_or(anyhow!(
"Variable {} not found in rights",
var_name.rights_field_name
))?,
))
}
}
impl CombineRights for GuardedVariable<f32> {
fn combine<'a>(
variable_map: &HashMap<String, String>,
rights_map: &HashMap<String, Rights>,
variable_name: impl Into<VariableNames<'a>>,
) -> Result<Self> {
let var_name = variable_name.into();
Ok(GuardedVariable::new(
var_name.value_field_name,
variable_map
.get(var_name.value_field_name)
.ok_or(anyhow!(
"Variable {} not found in value map",
var_name.value_field_name
))?
.parse()
.map_err(|err| anyhow!("{} for {}", err, var_name.value_field_name))?,
*rights_map.get(var_name.rights_field_name).ok_or(anyhow!(
"Variable {} not found in rights",
var_name.rights_field_name
))?,
))
}
}
#[cfg(test)]
mod test {
use super::{GuardedVariable, Rights};
use anyhow::Result;
use serde_json::{from_str, to_string_pretty};
#[test]
fn test_vec_serialisation() -> Result<()> {
let t = vec![
GuardedVariable::new("var1", "test123", Rights::PremiumUser),
GuardedVariable::new("var1", "test456", Rights::SuperUser),
GuardedVariable::new("var1", "test789", Rights::PTSUser),
];
let str = to_string_pretty(&t)?;
println!("{str}");
let r: Vec<GuardedVariable<&str>> = from_str(&str)?;
assert_eq!(t, r);
Ok(())
}
}
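As a reading aid (not part of the commit), here is a sketch of how `CombineRights::combine` is meant to pair a raw Excel cell value with its access right. `Rights` comes from the private `rust-database` crate, so the variant name follows the unit test above and is otherwise an assumption:

```rust
use std::collections::HashMap;

use anyhow::Result;
// Assumed imports; the items are defined above or in the private crate:
// use crate::guarded_variable::{CombineRights, GuardedVariable};
// use rust_database::Rights;

fn combine_example() -> Result<()> {
    // Raw cell values as delivered by the Python reader (everything is a string)...
    let values: HashMap<String, String> =
        HashMap::from([("einwaage".to_string(), "12.5".to_string())]);
    // ...and the per-field rights loaded from parameters_Nutzungsrollen.json.
    let rights: HashMap<String, Rights> =
        HashMap::from([("einwaage".to_string(), Rights::PremiumUser)]);

    // Parses "12.5" into an f32 and attaches the matching right.
    let einwaage = GuardedVariable::<f32>::combine(&values, &rights, "einwaage")?;
    assert_eq!(einwaage.value, 12.5);
    Ok(())
}
```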

119
cepitodb-master/src/main.rs Normal file

@@ -0,0 +1,119 @@
mod cepi;
mod guarded_variable;
mod python;
use std::{collections::HashMap, fs::read_to_string};
use cepi::cepi_22_8::{Cepi22_8, CepiMeta};
use python::PythonObject;
use anyhow::{anyhow, Result};
use chrono::prelude::*;
use clap::Parser;
use rust_database::*;
use serde_json::{from_str, to_string};
use crate::cepi::cepi_22_8::CepiInfo;
/// Helper program to get Cepi Excel files into the database
#[derive(Parser, Debug)]
#[command(version, about)]
struct Args {
/// Input file (.xlsx)
#[arg(short, long)]
file: String,
}
fn main() -> Result<()> {
let args = Args::parse();
let file = args.file;
let cepi_reader = PythonObject::new("python-cepi/cepi_read_function.py")?;
let cepi_data = cepi_reader.execute_script(
"read",
&file,
|r: HashMap<String, HashMap<String, String>>| Ok(r),
)?;
println!("{:#?}", cepi_data);
if cepi_data.is_empty() {
return Err(anyhow!("Empty return from python (unsupported version?)"));
}
let cepi = Cepi22_8::new(cepi_data, "resources/parameters_Nutzungsrollen.json")?;
if cepi.version.is_empty() {
return Err(anyhow!("Could not read cepi file"));
}
let cepi_meta = CepiMeta::from(&cepi);
let meta_str = to_string(&cepi_meta)?;
let cepi_info = CepiInfo::from(&cepi);
let info_str = to_string(&cepi_info)?;
let meta_info = vec![
("Gerät", format!("Cepi {}", &cepi.version).as_str()).into(),
("MessungsMeta", &meta_str).into(),
("MessungsInfo", &info_str).into(),
];
let conf_str = read_to_string("resources/db_config.json")?;
let config = from_str(&conf_str)?;
let table_name_str = read_to_string("resources/table_names.json")?;
let table_names = from_str(&table_name_str)?;
let mut db = PTSDataBase::new(config, table_names)?;
// check that the entry is not already in db
if !db.check_for_infos(&meta_info)? {
let measurements = cepi.create_measurements();
let dt = NaiveDateTime::parse_from_str(
&cepi.uebersicht.probenuntersuchung_datum.value,
"%Y-%m-%d %H:%M:%S",
)?;
let unix_time_stamp = dt.timestamp();
let (config_id, result_id) =
db.write_measurement(&meta_info, unix_time_stamp, "Ok", &measurements)?;
println!("Added new entry ({:?}, {:?})", config_id, result_id);
} else {
println!("Already in DB!");
}
Ok(())
}
#[cfg(test)]
mod test {
use anyhow::Result;
use test_case::test_case;
use crate::python::PythonObject;
use std::collections::HashMap;
#[test_case("resources/CEPI-method_Laufzettel_Part I_V22.9_Adapack_Muster 1.xlsx")]
#[test_case("resources/CEPI-method_Laufzettel_Part I_V23.02_Cascogne_Muster1.xlsx")]
#[test_case("resources/CEPI-method_Laufzettel_Part I_V23.03_Intertape Packaging_Muster 1.xlsx")]
#[test_case("resources/CEPI-method_Laufzettel_Part I_V23.04_Cats Flexible Packaging_M1.xlsx")]
fn verify_cepi(file: &str) -> Result<()> {
let cepi_reader = PythonObject::new("python-cepi/cepi_read_function.py")?;
cepi_reader.execute_script(
"read",
&file,
|r: HashMap<String, HashMap<String, String>>| Ok(r),
)?;
Ok(())
}
}


@@ -0,0 +1,76 @@
use pyo3::{
prelude::*,
types::{PyModule, PyTuple},
};
use anyhow::Result;
pub enum PythonScript<'a> {
File(&'a [u8]),
Path(&'a str),
}
impl<'a> From<&'a [u8]> for PythonScript<'a> {
fn from(content: &'a [u8]) -> Self {
PythonScript::File(content)
}
}
impl<'a> From<&'a str> for PythonScript<'a> {
fn from(path: &'a str) -> Self {
PythonScript::Path(path)
}
}
pub struct PythonObject {
script: String,
}
impl PythonObject {
pub fn new<'a>(script: impl Into<PythonScript<'a>>) -> Result<Self> {
Ok(Self {
script: match script.into() {
PythonScript::File(file) => std::str::from_utf8(file)?.to_string(),
PythonScript::Path(path) => std::fs::read_to_string(path)?,
},
})
}
pub fn execute_script<D, F, O, I>(&self, function_name: &str, i: I, f: F) -> Result<O>
where
I: ToPyObject,
D: for<'a> FromPyObject<'a>,
F: FnOnce(D) -> Result<O>,
{
Ok(Python::with_gil(|py| -> Result<O> {
let module = PyModule::from_code(py, &self.script, "dummy", "dummy")?;
let args = PyTuple::new(py, [i]);
Ok(f(module.getattr(function_name)?.call1(args)?.extract()?)?)
})?)
}
}
#[cfg(test)]
mod test {
use anyhow::Result;
use std::collections::HashMap;
use super::PythonObject;
#[test]
fn verify_python() -> Result<()> {
let cepi_reader = PythonObject::new("resources/cepi_read_function.py")?;
let cepi_data = cepi_reader.execute_script(
"read",
"resources/dummy.xlsx",
|r: HashMap<String, HashMap<String, String>>| Ok(r),
)?;
println!("{:#?}", cepi_data);
Ok(())
}
}
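A small illustration (not part of the commit) of the `PythonObject` API above, using an embedded script instead of a file path. Because pyo3's `auto-initialize` feature is enabled in Cargo.toml, this needs a Python interpreter available at runtime; the function and values are made up for the example:

```rust
use anyhow::Result;
// Assumes `PythonObject` from the module above is in scope.

fn python_example() -> Result<()> {
    // A &[u8] selects the PythonScript::File variant, i.e. inline script source.
    let script: &[u8] = b"def double(x):\n    return x * 2\n";
    let reader = PythonObject::new(script)?;

    // Calls double(21) in the embedded interpreter and extracts the result as i64.
    let doubled: i64 = reader.execute_script("double", 21i64, |r: i64| Ok(r))?;
    assert_eq!(doubled, 42);
    Ok(())
}
```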

71
database_Schema_dump.sql Normal file

@@ -0,0 +1,71 @@
/*
SQLyog Community v13.1.7 (64 bit)
MySQL - 10.6.4-MariaDB : Database - ptsdata
*********************************************************************
*/
/*!40101 SET NAMES utf8 */;
/*!40101 SET SQL_MODE=''*/;
/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
CREATE DATABASE /*!32312 IF NOT EXISTS*/`analyse` /*!40100 DEFAULT CHARACTER SET utf8mb3 */;
USE `analyse`;
DROP TABLE IF EXISTS `api_measurements`;
DROP TABLE IF EXISTS `api_results`;
DROP TABLE IF EXISTS `api_info`;
DROP TABLE IF EXISTS `api_configurations`;
/*Table structure for table `api_configurations` */
CREATE TABLE `api_configurations` (
`id` bigint(20) unsigned NOT NULL AUTO_INCREMENT PRIMARY KEY,
`time` bigint(20) unsigned NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb3;
/*Data for the table `api_configurations` */
/*Table structure for table `api_info` */
CREATE TABLE `api_info` (
`id` bigint(20) unsigned NOT NULL AUTO_INCREMENT PRIMARY KEY,
`name` varchar(80) NOT NULL,
`value` text NOT NULL,
`config_id` bigint(20) unsigned NOT NULL,
CONSTRAINT
FOREIGN KEY (config_id) REFERENCES api_configurations (id)
ON DELETE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb3;
/*Data for the table `api_info` */
/*Table structure for table `api_results` */
CREATE TABLE `api_results` (
`id` bigint(20) unsigned NOT NULL AUTO_INCREMENT PRIMARY KEY,
`status` varchar(40) NOT NULL,
`time` bigint(20) unsigned NOT NULL,
`config_id` bigint(20) unsigned NOT NULL,
CONSTRAINT
FOREIGN KEY (config_id) REFERENCES api_configurations (id)
ON DELETE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb3;
/*Data for the table `api_results` */
/*Table structure for table `api_measurements` */
CREATE TABLE `api_measurements` (
`id` bigint(20) unsigned NOT NULL AUTO_INCREMENT PRIMARY KEY,
`name` varchar(100) NOT NULL,
`value` float NOT NULL,
`result_id` bigint(20) unsigned NOT NULL,
`rights` varchar(32),
CONSTRAINT
FOREIGN KEY (result_id) REFERENCES api_results (id)
ON DELETE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb3;
/*Data for the table `api_measurements` */
/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;

165
ddtrust-backend-master/.gitignore vendored Normal file

@@ -0,0 +1,165 @@
# Created by https://www.toptal.com/developers/gitignore/api/node,visualstudiocode
# Edit at https://www.toptal.com/developers/gitignore?templates=node,visualstudiocode
### Node ###
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*
# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
*.lcov
# nyc test coverage
.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
jspm_packages/
# Snowpack dependency directory (https://snowpack.dev/)
web_modules/
# TypeScript cache
*.tsbuildinfo
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Optional stylelint cache
.stylelintcache
# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variable files
.env
.env.development.local
.env.test.local
.env.production.local
.env.local
# parcel-bundler cache (https://parceljs.org/)
.cache
.parcel-cache
# Next.js build output
.next
out
# Nuxt.js build / generate output
.nuxt
dist
# Gatsby files
.cache/
# Comment in the public line in if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public
# vuepress build output
.vuepress/dist
# vuepress v2.x temp and cache directory
.temp
# Docusaurus cache and generated files
.docusaurus
# Serverless directories
.serverless/
# FuseBox cache
.fusebox/
# DynamoDB Local files
.dynamodb/
# TernJS port file
.tern-port
# Stores VSCode versions used for testing VSCode extensions
.vscode-test
# yarn v2
.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*
### Node Patch ###
# Serverless Webpack directories
.webpack/
# Optional stylelint cache
# SvelteKit build / generate output
.svelte-kit
### VisualStudioCode ###
.vscode/*
!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/extensions.json
!.vscode/*.code-snippets
# Local History for Visual Studio Code
.history/
# Built Visual Studio Code Extensions
*.vsix
### VisualStudioCode Patch ###
# Ignore all local history of files
.history
.ionide
# End of https://www.toptal.com/developers/gitignore/api/node,visualstudiocode
package-lock.json


@@ -0,0 +1,7 @@
{
"workbench.colorCustomizations": {
"activityBar.background": "#19313B",
"titleBar.activeBackground": "#234452",
"titleBar.activeForeground": "#F7FAFC"
}
}


@@ -0,0 +1,406 @@
const express = require('express')
const mariadb = require('mariadb')
const fs = require('fs');
let db_settings = JSON.parse(fs.readFileSync('resources/db_settings.json', 'utf8'));
const app = express()
const pool = mariadb.createPool({
host: db_settings.host,
port: db_settings.port,
user: db_settings.user,
password: db_settings.password,
database: db_settings.database,
supportBigNumbers: true,
})
const PORT = 80
function convert_rights(rights) {
switch (rights) {
case "Super-User":
return 0;
case "PTS-User":
return 1;
case "Premium-User":
return 2;
case "Standard-User":
return 3;
case "Test-User":
return 4;
default:
throw new Error("Could not parse Rights (" + rights + ")");
}
}
// encapsulate db access
async function queryDB(query) {
let conn;
let rows;
try {
conn = await pool.getConnection();
rows = await conn.query(query);
} catch (err) {
throw err;
} finally {
if (conn) {
conn.end();
}
}
return rows;
}
async function queryDevice(device, incoming_rights) {
let config_ids = await queryDB(
"SELECT config_id " +
"FROM api_info " +
"WHERE name=\"Gerät\" AND value LIKE \"" + device + "\""
);
let meta_map = {};
for (let i = 0; i < config_ids.length; i++) {
const config_id = config_ids[i].config_id;
let metas = await queryDB(
"SELECT name, value " +
"FROM api_info " +
"WHERE config_id=\"" + config_id + "\""
);
let m = {};
m["data"] = {};
for (let j = 0; j < metas.length; j++) {
let value = metas[j].value;
let name = metas[j].name;
if (name == "Gerät") {
m["Gerät"] = value;
} else {
let values = JSON.parse(value);
for (let [key, value] of Object.entries(values)) {
if (Array.isArray(value)) {
for (let i = 0; i < value.length; i++) {
let v = value[i];
let rights = convert_rights(v.right);
if (rights < incoming_rights) {
delete values[key];
break;
}
}
} else {
if (value.right === undefined) {
for (let [inner_key, inner_value] of Object.entries(value)) {
if (Array.isArray(inner_value)) {
for (let i = 0; i < inner_value.length; i++) {
let v = inner_value[i];
for (let [inner_inner_key, inner_inner_value] of Object.entries(v)) {
let rights = convert_rights(inner_inner_value.right);
if (rights < incoming_rights) {
delete v[inner_inner_key];
}
}
}
}
else if (inner_value.right !== undefined) {
let rights = convert_rights(inner_value.right);
if (rights < incoming_rights) {
delete value[inner_key];
}
}
else {
for (let [inner_inner_key, inner_inner_value] of Object.entries(inner_value)) {
let rights = convert_rights(inner_inner_value.right);
if (rights < incoming_rights) {
delete inner_value[inner_inner_key];
}
}
}
}
} else {
let rights = convert_rights(value.right);
if (rights < incoming_rights) {
delete values[key];
}
}
}
}
let d = m["data"];
let l = Object.keys(d).length;
if (l != 0) {
m["data"] = { ...d, ...values };
} else {
m["data"] = values;
}
}
}
meta_map[config_id] = m;
}
for (let config_id of Object.keys(meta_map)) {
let results = await queryDB(
"SELECT id " +
"FROM api_results " +
"WHERE config_id=\"" + config_id + "\""
);
for (let j = 0; j < results.length; j++) {
let measurements = await queryDB(
"SELECT name, value, rights " +
"FROM api_measurements " +
"WHERE result_id=\"" + results[j].id + "\""
);
let m = {};
for (let k = 0; k < measurements.length; k++) {
let measurement = measurements[k];
let rights = measurement.rights.replace(/['"]+/g, '');
let rights_value = convert_rights(rights);
if (rights_value >= incoming_rights) {
m[measurement.name] = { name: measurement.name, value: measurement.value, rights: rights };
}
}
meta_map[config_id][results[j].id] = m;
}
}
return meta_map;
}
async function calculate(elements, parameter, operation) {
switch (operation) {
case "average":
let tmp = 0;
let count = 0;
for (let param of parameter) {
const value = elements.find(element => element.name == param);
if (value === undefined) {
continue;
}
count += 1;
tmp += value.value;
}
if (count == 0) {
return null;
}
return tmp / count;
case "sum":
let sum = 0;
for (let param of parameter) {
const value = elements.find(element => element.name == param);
if (value === undefined) {
return null;
}
sum += value.value;
}
return sum;
case "divide":
const v1 = elements.find(element => element.name == parameter[0]);
const v2 = elements.find(element => element.name == parameter[1]);
if (v1 === undefined || v2 === undefined || v2 == 0) {
return null;
}
return v1.value / v2.value;
case "multiply":
let p = 1.0;
for (let param of parameter) {
const value = elements.find(element => element.name == param);
if (value === undefined) {
return null;
}
p *= value.value;
}
return p;
case "subtract":
const s1 = elements.find(element => element.name == parameter[0]);
const s2 = elements.find(element => element.name == parameter[1]);
if (s1 === undefined || s2 === undefined) {
return null;
}
return s1.value - s2.value;
case "discard":
const value = elements.find(element => element.name == parameter[0]);
const res = elements.find(element => element.name == parameter[1]);
if (value === undefined || res === undefined) {
return null;
}
if (value.value !== null && !value.value) {
return res.value;
} else {
return 0;
}
default:
throw "unknown operation " + operation;
}
}
async function createResultForEntry(parameters, descriptions, calculations) {
let result = {};
for (let [key, data] of Object.entries(parameters)) {
if (Object.keys(data).length == 0) {
return {};
}
if (key == "Gerät") {
continue;
}
let elements = findAll(data);
for (let [calc_key, calc_value] of Object.entries(calculations)) {
let val = null;
if (calc_value.const !== undefined) {
val = parseFloat(calc_value.const[0]);
} else {
val = await calculate(elements, calc_value.source, calc_value.operation);
}
if (val !== null) {
elements.push({ name: calc_key, value: val });
}
}
console.info(elements);
for (let element of elements) {
let key = String(element.name);
let value = element.value;
let description = descriptions[key];
if (description !== undefined) {
if (value !== null) {
result[key] = {
name: description.name,
value: value,
description: description.description,
unit: description.unit,
}
}
} else {
for (let [description_key, description] of Object.entries(descriptions)) {
if (key.startsWith(description_key)) {
result[key] = {
name: description.name,
value: value,
description: description.description,
unit: description.unit,
}
break;
}
}
}
}
}
return result;
}
async function createResultList(parameters, descriptions, calculations) {
let result = [];
for (let entry of Object.values(parameters)) {
let entry_result = await createResultForEntry(entry, descriptions, calculations);
if (Object.keys(entry_result).length != 0) {
result.push(entry_result);
}
}
return result;
}
function findAll(object) {
let list = [];
if (object === null) {
return list;
}
if (object.name !== undefined) {
if (object.value !== undefined) {
list.push({ name: object.name, value: object.value });
} else {
for (let [key, value] of Object.entries(object)) {
list = list.concat(findAll(value));
}
}
} else {
for (let [key, value] of Object.entries(object)) {
list = list.concat(findAll(value));
}
}
return list;
}
app.get('/ddtrust/cepi/:rights', async (request, response) => {
// read parameters from DB and only return accessible
let deviceParameters = await queryDevice("%Cepi%", convert_rights(request.params.rights));
// read parameter description file
let descriptions = JSON.parse(fs.readFileSync('resources/variable_description.json', 'utf8'));
// read parameter calculation file
let calculations = JSON.parse(fs.readFileSync('resources/variable_calculation.json', 'utf8'));
// combine description information with parameters queried from DB
let result = await createResultList(deviceParameters, descriptions, calculations);
response.json(result)
})
app.listen(PORT, () => {
console.log(`Server running on port ${PORT}`)
})


@@ -0,0 +1,20 @@
{
"name": "ddtrust-backend",
"version": "1.0.0",
"description": "",
"main": "index.js",
"scripts": {
"start": "node index.js",
"test": "echo \"Error: no test specified\" && exit 1"
},
"repository": {
"type": "git",
"url": "git@gitlab.ptspaper.de:scs/ddtrust-backend.git"
},
"author": "",
"dependencies": {
"bson": "^5.3.0",
"express": "^4.18.2",
"mariadb": "^3.1.2"
}
}


@@ -0,0 +1,7 @@
{
"host": "pts-mariadb1-ext",
"port": "3306",
"user": "klaus",
"password": "A6Uv2x3X3t1Korlynpky",
"database": "analyse"
}


@@ -0,0 +1,728 @@
{
"var_otro": {
"const": [
"50"
]
},
"var_otro_pro_l": {
"const": [
"25"
]
},
"var_macrostickies": {
"const": [
"5"
]
},
"var_ten": {
"const": [
"10"
]
},
"var_mil": {
"const": [
"1000000"
]
},
"var_thousand": {
"const": [
"1000"
]
},
"var_hundred": {
"const": [
"100"
]
},
"var_twenty": {
"const": [
"20"
]
},
"var_one": {
"const": [
"1"
]
},
"var_one_point_eight": {
"const": [
"1.8"
]
},
"trockengehalt_mw_1": {
"source": [
"tg_1_1",
"tg_2_1"
],
"operation": "average"
},
"trockengehalt_mw_2": {
"source": [
"tg_1_2",
"tg_2_2"
],
"operation": "average"
},
"trockengehalt_mw_3": {
"source": [
"tg_1_3",
"tg_2_3"
],
"operation": "average"
},
"trockengehalt_mw_4": {
"source": [
"tg_1_4",
"tg_2_4"
],
"operation": "average"
},
"trockengehalt_mw_5": {
"source": [
"tg_1_5",
"tg_2_5"
],
"operation": "average"
},
"trockengehalt_mw_6": {
"source": [
"tg_1_6",
"tg_2_6"
],
"operation": "average"
},
"trockengehalt_mw_7": {
"source": [
"tg_1_7",
"tg_2_7"
],
"operation": "average"
},
"trockengehalt_mw_8": {
"source": [
"tg_1_8",
"tg_2_8"
],
"operation": "average"
},
"summe_massen": {
"source": [
"masse_1",
"masse_2",
"masse_3",
"masse_4",
"masse_5",
"masse_6",
"masse_7",
"masse_8"
],
"operation": "sum"
},
"anteil_an_gesamtprobe_1": {
"source": [
"masse_1",
"summe_massen"
],
"operation": "divide"
},
"anteil_an_gesamtprobe_2": {
"source": [
"masse_2",
"summe_massen"
],
"operation": "divide"
},
"anteil_an_gesamtprobe_3": {
"source": [
"masse_3",
"summe_massen"
],
"operation": "divide"
},
"anteil_an_gesamtprobe_4": {
"source": [
"masse_4",
"summe_massen"
],
"operation": "divide"
},
"anteil_an_gesamtprobe_5": {
"source": [
"masse_5",
"summe_massen"
],
"operation": "divide"
},
"anteil_an_gesamtprobe_6": {
"source": [
"masse_6",
"summe_massen"
],
"operation": "divide"
},
"anteil_an_gesamtprobe_7": {
"source": [
"masse_7",
"summe_massen"
],
"operation": "divide"
},
"anteil_an_gesamtprobe_8": {
"source": [
"masse_8",
"summe_massen"
],
"operation": "divide"
},
"anteil_ohne_trockenentf_1_discard": {
"source": [
"trocken_entfernt_1",
"anteil_an_gesamtprobe_1"
],
"operation": "discard"
},
"anteil_ohne_trockenentf_2_discard": {
"source": [
"trocken_entfernt_2",
"anteil_an_gesamtprobe_2"
],
"operation": "discard"
},
"anteil_ohne_trockenentf_3_discard": {
"source": [
"trocken_entfernt_3",
"anteil_an_gesamtprobe_3"
],
"operation": "discard"
},
"anteil_ohne_trockenentf_4_discard": {
"source": [
"trocken_entfernt_4",
"anteil_an_gesamtprobe_4"
],
"operation": "discard"
},
"anteil_ohne_trockenentf_5_discard": {
"source": [
"trocken_entfernt_5",
"anteil_an_gesamtprobe_5"
],
"operation": "discard"
},
"anteil_ohne_trockenentf_6_discard": {
"source": [
"trocken_entfernt_6",
"anteil_an_gesamtprobe_6"
],
"operation": "discard"
},
"anteil_ohne_trockenentf_7_discard": {
"source": [
"trocken_entfernt_7",
"anteil_an_gesamtprobe_7"
],
"operation": "discard"
},
"anteil_ohne_trockenentf_8_discard": {
"source": [
"trocken_entfernt_8",
"anteil_an_gesamtprobe_8"
],
"operation": "discard"
},
"summe_ohne_trockenentfernt": {
"source": [
"anteil_ohne_trockenentf_1_discard",
"anteil_ohne_trockenentf_2_discard",
"anteil_ohne_trockenentf_3_discard",
"anteil_ohne_trockenentf_4_discard",
"anteil_ohne_trockenentf_5_discard",
"anteil_ohne_trockenentf_6_discard",
"anteil_ohne_trockenentf_7_discard",
"anteil_ohne_trockenentf_8_discard"
],
"operation": "sum"
},
"anteil_ohne_trockenentf_1": {
"source": [
"anteil_ohne_trockenentf_1_discard",
"summe_ohne_trockenentfernt"
],
"operation": "divide"
},
"anteil_ohne_trockenentf_2": {
"source": [
"anteil_ohne_trockenentf_2_discard",
"summe_ohne_trockenentfernt"
],
"operation": "divide"
},
"anteil_ohne_trockenentf_3": {
"source": [
"anteil_ohne_trockenentf_3_discard",
"summe_ohne_trockenentfernt"
],
"operation": "divide"
},
"anteil_ohne_trockenentf_4": {
"source": [
"anteil_ohne_trockenentf_4_discard",
"summe_ohne_trockenentfernt"
],
"operation": "divide"
},
"anteil_ohne_trockenentf_5": {
"source": [
"anteil_ohne_trockenentf_5_discard",
"summe_ohne_trockenentfernt"
],
"operation": "divide"
},
"anteil_ohne_trockenentf_6": {
"source": [
"anteil_ohne_trockenentf_6_discard",
"summe_ohne_trockenentfernt"
],
"operation": "divide"
},
"anteil_ohne_trockenentf_7": {
"source": [
"anteil_ohne_trockenentf_7_discard",
"summe_ohne_trockenentfernt"
],
"operation": "divide"
},
"anteil_ohne_trockenentf_8": {
"source": [
"anteil_ohne_trockenentf_8_discard",
"summe_ohne_trockenentfernt"
],
"operation": "divide"
},
"einwaage_von_50g_otro_1_temp": {
"source": [
"anteil_ohne_trockenentf_1",
"var_otro"
],
"operation": "multiply"
},
"einwaage_von_50g_otro_2_temp": {
"source": [
"anteil_ohne_trockenentf_2",
"var_otro"
],
"operation": "multiply"
},
"einwaage_von_50g_otro_3_temp": {
"source": [
"anteil_ohne_trockenentf_3",
"var_otro"
],
"operation": "multiply"
},
"einwaage_von_50g_otro_4_temp": {
"source": [
"anteil_ohne_trockenentf_4",
"var_otro"
],
"operation": "multiply"
},
"einwaage_von_50g_otro_5_temp": {
"source": [
"anteil_ohne_trockenentf_5",
"var_otro"
],
"operation": "multiply"
},
"einwaage_von_50g_otro_6_temp": {
"source": [
"anteil_ohne_trockenentf_6",
"var_otro"
],
"operation": "multiply"
},
"einwaage_von_50g_otro_7_temp": {
"source": [
"anteil_ohne_trockenentf_7",
"var_otro"
],
"operation": "multiply"
},
"einwaage_von_50g_otro_8_temp": {
"source": [
"anteil_ohne_trockenentf_8",
"var_otro"
],
"operation": "multiply"
},
"einwaage_von_50g_otro_1": {
"source": [
"einwaage_von_50g_otro_1_temp",
"trockengehalt_mw_1"
],
"operation": "divide"
},
"einwaage_von_50g_otro_2": {
"source": [
"einwaage_von_50g_otro_2_temp",
"trockengehalt_mw_2"
],
"operation": "divide"
},
"einwaage_von_50g_otro_3": {
"source": [
"einwaage_von_50g_otro_3_temp",
"trockengehalt_mw_3"
],
"operation": "divide"
},
"einwaage_von_50g_otro_4": {
"source": [
"einwaage_von_50g_otro_4_temp",
"trockengehalt_mw_4"
],
"operation": "divide"
},
"einwaage_von_50g_otro_5": {
"source": [
"einwaage_von_50g_otro_5_temp",
"trockengehalt_mw_5"
],
"operation": "divide"
},
"einwaage_von_50g_otro_6": {
"source": [
"einwaage_von_50g_otro_6_temp",
"trockengehalt_mw_6"
],
"operation": "divide"
},
"einwaage_von_50g_otro_7": {
"source": [
"einwaage_von_50g_otro_7_temp",
"trockengehalt_mw_7"
],
"operation": "divide"
},
"einwaage_von_50g_otro_8": {
"source": [
"einwaage_von_50g_otro_8_temp",
"trockengehalt_mw_8"
],
"operation": "divide"
},
"summe_einwaage_von_50g-otro": {
"source": [
"einwaage_von_50g_otro_1",
"einwaage_von_50g_otro_2",
"einwaage_von_50g_otro_3",
"einwaage_von_50g_otro_4",
"einwaage_von_50g_otro_5",
"einwaage_von_50g_otro_6",
"einwaage_von_50g_otro_7",
"einwaage_von_50g_otro_8"
],
"operation": "sum"
},
"csb_g_pro_kg": {
"source": [
"csb_messwert",
"var_otro_pro_l"
],
"operation": "divide"
},
"csb_prozent": {
"source": [
"csb_g_pro_kg",
"var_ten"
],
"operation": "divide"
},
"ars_rückstand_probe_1": {
"source": [
"ars_filtrat1_auswaage",
"ars_filtrat1_masse_schale"
],
"operation": "subtract"
},
"ars_rückstand_probe_2": {
"source": [
"ars_filtrat2_auswaage",
"ars_filtrat2_masse_schale"
],
"operation": "subtract"
},
"ars_rückstand_wasser_1": {
"source": [
"ars_wasser1_auswaage",
"ars_wasser1_masse_schale"
],
"operation": "subtract"
},
"ars_rückstand_wasser_2": {
"source": [
"ars_wasser2_auswaage",
"ars_wasser2_masse_schale"
],
"operation": "subtract"
},
"ars_abdampfrückstand_probe_1": {
"source": [
"ars_rückstand_probe_1",
"ars_filtrat1_einwaage"
],
"operation": "divide"
},
"ars_abdampfrückstand_probe_2": {
"source": [
"ars_rückstand_probe_2",
"ars_filtrat2_einwaage"
],
"operation": "divide"
},
"ars_abdampfrückstand_wasser_1": {
"source": [
"ars_rückstand_wasser_1",
"ars_wasser1_einwaage"
],
"operation": "divide"
},
"ars_abdampfrückstand_wasser_2": {
"source": [
"ars_rückstand_wasser_2",
"ars_wasser2_einwaage"
],
"operation": "divide"
},
"ars_mittelwert_probe": {
"source": [
"ars_abdampfrückstand_probe_1",
"ars_abdampfrückstand_probe_2"
],
"operation": "average"
},
"ars_mittelwert_wasser": {
"source": [
"ars_abdampfrückstand_wasser_1",
"ars_abdampfrückstand_wasser_2"
],
"operation": "average"
},
"ars_abdampfrückstand_probe_mg_pro_g_temp": {
"source": [
"ars_mittelwert_probe",
"ars_mittelwert_wasser"
],
"operation": "subtract"
},
"ars_abdampfrückstand_probe_mg_pro_g_temp2": {
"source": [
"ars_abdampfrückstand_probe_mg_pro_g_temp",
"var_mil"
],
"operation": "multiply"
},
"ars_abdampfrückstand_probe_mg_pro_g": {
"source": [
"ars_abdampfrückstand_probe_mg_pro_g_temp2",
"var_otro_pro_l"
],
"operation": "divide"
},
"ars_abdampfrückstand_probe_%": {
"source": [
"ars_abdampfrückstand_probe_mg_pro_g",
"var_ten"
],
"operation": "divide"
},
"reject_grob_otro_g": {
"source": [
"reject_auswaage_grob",
"reject_filtergewicht_grob"
],
"operation": "subtract"
},
"reject_grob_otro_%_temp": {
"source": [
"reject_grob_otro_g",
"var_otro"
],
"operation": "divide"
},
"reject_grob_otro_%": {
"source": [
"reject_grob_otro_%_temp",
"var_hundred"
],
"operation": "multiply"
},
"reject_grob_excl_dry": {
"source": [
"summe_ohne_trockenentfernt",
"reject_grob_otro_%"
],
"operation": "multiply"
},
"reject_grob_incl_dry_temp1": {
"source": [
"var_one",
"summe_ohne_trockenentfernt"
],
"operation": "subtract"
},
"reject_grob_incl_dry_temp2": {
"source": [
"reject_grob_incl_dry_temp1",
"var_hundred"
],
"operation": "multiply"
},
"reject_grob_incl_dry": {
"source": [
"reject_grob_excl_dry",
"reject_grob_incl_dry_temp2"
],
"operation": "sum"
},
"reject_fein_otro_g": {
"source": [
"reject_auswaage_fein",
"reject_filtergewicht_fein"
],
"operation": "subtract"
},
"reject_fein_otro_%_temp": {
"source": [
"reject_fein_otro_g",
"var_twenty"
],
"operation": "divide"
},
"reject_fein_otro_%": {
"source": [
"reject_fein_otro_%_temp",
"var_hundred"
],
"operation": "multiply"
},
"reject_related_total_temp1": {
"source": [
"var_hundred",
"reject_grob_otro_%"
],
"operation": "subtract"
},
"reject_related_total_temp2": {
"source": [
"reject_fein_otro_%_temp",
"reject_related_total_temp1"
],
"operation": "multiply"
},
"reject_related_total": {
"source": [
"reject_related_total_temp2",
"summe_ohne_trockenentfernt"
],
"operation": "multiply"
},
"sd_rückstand": {
"source": [
"sd_auswaage",
"sd_filterblatt"
],
"operation": "subtract"
},
"sd_ergebnis_temp": {
"source": [
"sd_rückstand",
"sd_einwaage"
],
"operation": "divide"
},
"sd_ergebnis": {
"source": [
"sd_ergebnis_temp",
"var_hundred"
],
"operation": "multiply"
},
"sd_rückstand_otro": {
"source": [
"sd_auswaage_ts",
"sd_filterblatt"
],
"operation": "subtract"
},
"sd_ergebnis_otro_temp": {
"source": [
"sd_rückstand_otro",
"sd_einwaage"
],
"operation": "divide"
},
"sd_ergebnis_otro": {
"source": [
"sd_ergebnis_otro_temp",
"var_hundred"
],
"operation": "multiply"
},
"volumen_ac_temp": {
"source": [
"volumen_ac1",
"var_one_point_eight"
],
"operation": "multiply"
},
"volumen_ac": {
"source": [
"volumen_ac_temp",
"grammatur_ac1"
],
"operation": "divide"
},
"volumen_af_temp": {
"source": [
"volumen_af1",
"var_one_point_eight"
],
"operation": "multiply"
},
"volumen_af": {
"source": [
"volumen_af_temp",
"grammatur_af1"
],
"operation": "divide"
},
"feinsortierung_einwaage_temp": {
"source": [
"var_twenty",
"sd_ergebnis"
],
"operation": "divide"
},
"feinsortierung_einwaage": {
"source": [
"feinsortierung_einwaage_temp",
"var_hundred"
],
"operation": "multiply"
},
"macrostickies_einwaage_temp": {
"source": [
"var_macrostickies",
"sd_ergebnis"
],
"operation": "divide"
},
"macrostickies_einwaage": {
"source": [
"macrostickies_einwaage_temp",
"var_hundred"
],
"operation": "multiply"
}
}
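Each entry above is either a constant (`const`) or an operation (`average`, `sum`, `divide`, `multiply`, `subtract`, `discard`) applied to the measured or previously computed values listed under `source`; the `calculate` function in the Node.js backend earlier in this listing evaluates them on request. A short worked example in Rust with made-up measurement values, purely to show the arithmetic:

```rust
fn main() {
    // Hypothetical measured values: two dry-content determinations for component 1
    // and the masses of two components.
    let (tg_1_1, tg_2_1) = (92.0_f64, 94.0_f64);
    let (masse_1, masse_2) = (30.0_f64, 20.0_f64);

    // "trockengehalt_mw_1": average of "tg_1_1" and "tg_2_1".
    let trockengehalt_mw_1 = (tg_1_1 + tg_2_1) / 2.0;

    // "summe_massen": sum over all component masses (only two components here).
    let summe_massen = masse_1 + masse_2;

    // "anteil_an_gesamtprobe_1": "masse_1" divided by "summe_massen".
    let anteil_an_gesamtprobe_1 = masse_1 / summe_massen;

    assert_eq!(trockengehalt_mw_1, 93.0);
    assert_eq!(anteil_an_gesamtprobe_1, 0.6);
}
```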


@@ -0,0 +1,432 @@
{
"probeneingang_datum": {
"name": "Probeneingang Datum",
"description": "Datum des Probeneingangs.",
"unit": ""
},
"probeneingang_name": {
"name": "Probeneingang Name",
"description": "Name der für die Beschaffung verantwortlichen Person.",
"unit": ""
},
"probenvorbereitung_datum": {
"name": "Probenvorbereitung Datum",
"description": "Datum der Probenvorbereitung.",
"unit": ""
},
"probenvorbereitung_name": {
"name": "Probenvorbereitung Name",
"description": "Name der Person, welche die Probe vorbereitet hat.",
"unit": ""
},
"probenuntersuchung_datum": {
"name": "Probenuntersuchung Datum",
"description": "Datum der Probenuntersuchung.",
"unit": ""
},
"probenuntersuchung_name": {
"name": "Probenuntersuchung Name",
"description": "Name der Person, welche die Probe im Labor untersucht hat.",
"unit": ""
},
"auftrag": {
"name": "Auftrag",
"description": "Name/Bezeichnung des Projektes für welches diese Probe untersucht wird.",
"unit": ""
},
"labornummer": {
"name": "Labornummer",
"description": "Laborkennziffer",
"unit": ""
},
"firma": {
"name": "Firma",
"description": "Bei Untersuchungen für Prüfgeschäft, auftraggebende Firma",
"unit": ""
},
"bezeichnung": {
"name": "Bezeichnung",
"description": "Probenbezeichnung bestehend aus Probennummer und Probenbezeichnung",
"unit": ""
},
"bestandteil": {
"name": "Bestandteil",
"description": "Bezeichnung eines Bestandteils der untersuchten Probe.",
"unit": ""
},
"masse": {
"name": "Masse Bestandteil",
"description": "Die Gesamtmasse eines Bestandteils der untersuchten Probe.",
"unit": "g"
},
"tg": {
"name": "Trockengehalt",
"description": "Thermogravimetrisch bestimmter Feststoffgehalt eines Bestandteils.",
"unit": "%"
},
"trockengehalt_mw_1": {
"name": "Trockengehalt",
"description": "Thermogravimetrisch bestimmter Feststoffgehalt (Mittelwert) eines Bestandteils.",
"unit": "%"
},
"trockengehalt_mw_2": {
"name": "Trockengehalt",
"description": "Thermogravimetrisch bestimmter Feststoffgehalt (Mittelwert) eines Bestandteils.",
"unit": "%"
},
"trockengehalt_mw_3": {
"name": "Trockengehalt",
"description": "Thermogravimetrisch bestimmter Feststoffgehalt (Mittelwert) eines Bestandteils.",
"unit": "%"
},
"trockengehalt_mw_4": {
"name": "Trockengehalt",
"description": "Thermogravimetrisch bestimmter Feststoffgehalt (Mittelwert) eines Bestandteils.",
"unit": "%"
},
"trockengehalt_mw_5": {
"name": "Trockengehalt",
"description": "Thermogravimetrisch bestimmter Feststoffgehalt (Mittelwert) eines Bestandteils.",
"unit": "%"
},
"trockengehalt_mw_6": {
"name": "Trockengehalt",
"description": "Thermogravimetrisch bestimmter Feststoffgehalt (Mittelwert) eines Bestandteils.",
"unit": "%"
},
"trockengehalt_mw_7": {
"name": "Trockengehalt",
"description": "Thermogravimetrisch bestimmter Feststoffgehalt (Mittelwert) eines Bestandteils.",
"unit": "%"
},
"trockengehalt_mw_8": {
"name": "Trockengehalt",
"description": "Thermogravimetrisch bestimmter Feststoffgehalt (Mittelwert) eines Bestandteils.",
"unit": "%"
},
"einwaage": {
"name": "Einwaage",
"description": "Masse an Probe, welches für die Zerfaserung verwendet werden muss.",
"unit": "g"
},
"gerätetyp": {
"name": "Gerätetyp",
"description": "Verwendetes Gerät für die Zerfaserung.",
"unit": ""
},
"labor": {
"name": "Labor",
"description": "Labornummer in der die Probe untersucht wurde.",
"unit": ""
},
"zerfaserungsdauer": {
"name": "Zerfaserungsdauer",
"description": "Dauer wie lange die Probe zerfasert wurde.",
"unit": "min"
},
"umdrehungen": {
"name": "Umdrehungen",
"description": "Drehzahl bei der die Probe zerfasert wurde.",
"unit": "U/min"
},
"bemerkung": {
"name": "Bemerkung",
"description": "Auffälligkeiten bei bzw. nach der Zerfaserung der Probe.",
"unit": ""
},
"csb_verdünnung": {
"name": "Verdünnungsfaktor CSB",
"description": "Verdünnungsverhältnis welches trotz des bestehenden Messbereiches der CSB Messung gewählt wurde.",
"unit": ""
},
"csb_messbereich": {
"name": "Messbereich CSB",
"description": "Messbereich welcher für die CSB Messung gewählt wurde.",
"unit": "mg/l"
},
"csb_messwert": {
"name": "CSB Messwert",
"description": "Messwert der CSB Messung im angegebenen Messbereich und bei angegebener Verdünnung.",
"unit": "mg/l"
},
"ars_filtrat_einwaage": {
"name": "Einwaage Filtrat Abdampfrückstand",
"description": "Einwaage an Filtrat der zerfaserten Probe für die Bestimmung des Abdampfrückstands.",
"unit": "g"
},
"ars_wasser_einwaage": {
"name": "Einwaage Wasser Abdampfrückstand",
"description": "Einwaage an Wasser, welches für die Zerfaserung verwendet wurde, für die Bestimmung des Abdampfrückstands.",
"unit": "g"
},
"ph_filtrat": {
"name": "pH-Wert Filtrat",
"description": "pH-Wert des Filtrats der zerfaserten Probe",
"unit": ""
},
"lf_filtrat": {
"name": "Leitfähigkeit Filtrat",
"description": "Leitfähigkeit des Filtrats der zerfaserten Probe",
"unit": "mikroS/cm"
},
"ph_wasser": {
"name": "pH-Wert Wasser",
"description": "pH-Wert des Wassers, welches für die Zerfaserung verwendet wurde.",
"unit": ""
},
"lf_wasser": {
"name": "Leitfähigkeit Wasser",
"description": "Leitfähigkeit des Wassers, welches für die Zerfaserung verwendet wurde.",
"unit": "mikroS/cm"
},
"reject_filtergewicht": {
"name": "Rückstand Filtergewicht",
"description": "Masse des Filterblattes, welches für die Abfiltrierung des Rejects verwendet wurde.",
"unit": "g"
},
"reject_auswaage": {
"name": "Rückstand Auswaage",
"description": "Masse der Auswaage von Filterblatt und Rückstand.",
"unit": "g"
},
"reject_related_total": {
"name": "Rückstand (Gesamtmenge)",
"description": "Rückstand bezogen auf Gesamtprodukt.",
"unit": "%"
},
"faser-stippen-rückstand": {
"name": "Faser-Stippen Rückstand",
"description": "Sind Faser-Stippen im Rückstand enthalten?",
"unit": ""
},
"faser-stippen-rückstand_art-größe": {
"name": "Faser-Stippen Art/Größe",
"description": "Art und Größer der (wenn) enthaltenen Faser-Stippen.",
"unit": ""
},
"papierfremde-bestandteile": {
"name": "Papierfremde Bestandteile",
"description": "Sind papierfremde Produktbestandteile im Rückstand?",
"unit": ""
},
"papierfremde-art-größe": {
"name": "Papierfremde Bestandteile Art/Größe",
"description": "Art und Größe der (wenn) enthaltenen papierfremden Produktbestandteile.",
"unit": ""
},
"papierfremde-zerkleinerung": {
"name": "Papierfremde Bestandteile Zerkleinerungsgrad",
"description": "Wie hoch ist der Zerkleinerungsgrad der papierfremden Produktbestandteile?",
"unit": ""
},
"papierfremde-material": {
"name": "Papierfremde Bestandteile Material",
"description": "Aus welchem Material besteht das papierfremde Material aus dem Produkt.",
"unit": ""
},
"sd_filterblatt": {
"name": "Stoffdichte Filterblatt",
"description": "Masse des Filterblatts, welches für die Bestimmung der Stoffdichte verwendet wurde.",
"unit": "g"
},
"sd_einwaage": {
"name": "Stoffdichte Einwaage",
"description": "Masse an Faserstoffsuspension die für die Bestimmung der Stoffdichte verwendet wurde.",
"unit": "g"
},
"sd_auswaage": {
"name": "Stoffdichte Auswaage",
"description": "Masse des Filterblatts inklusive Faserstoff, nach der Trocknung im Blatttrockner, für die Bestimmung der Stoffdichte.",
"unit": "g"
},
"sd_auswaage_ts": {
"name": "Stoffdichte Auswaage (TS)",
"description": "Masse des Filterblatts inklusive Faserstoff, nach der Trocknung im Trockenschrank bis zur Massekonstanz, für die Bestimmung der Stoffdichte.",
"unit": "g"
},
"volumen": {
"name": "Masse Faserstoffsuspension",
"description": "Masse an Faserstoffsuspension verwendet wurde, um die Grammatur für die Folgeblätter zu bestimmen.",
"unit": "g"
},
"grammatur": {
"name": "Grammatur",
"description": "Masse des hergestellten Ansichtsblattes.",
"unit": "g"
},
"bkt_bewertung_grob_prüfer": {
"name": "Blattklebetest Grobsortierung Prüfer",
"description": "Name des Prüfers des Blattklebetests der Blätter der Grobsortierung.",
"unit": ""
},
"bkt_bewertung_grob_bewertung": {
"name": "Blattklebetest Grobsortierung Bewertung",
"description": "Bewertung des Blattklebetests der Blätter der Grobsortierung.",
"unit": ""
},
"bkt_bewertung_grob_bemerkung": {
"name": "Blattklebetest Grobsortierung Bemerkung",
"description": "Bemerkungen zu den Blattklebetests der Blätter der Grobsortierung.",
"unit": ""
},
"bkt_bewertung_fein_prüfer": {
"name": "Blattklebetest Feinsortierung Prüfer",
"description": "Name des Prüfers des Blattklebetests der Blätter der Feinsortierung.",
"unit": ""
},
"bkt_bewertung_fein_bewertung": {
"name": "Blattklebetest Feinsortierung Bewertung",
"description": "Bewertung des Blattklebetests der Blätter der Feinsortierung.",
"unit": ""
},
"bkt_bewertung_fein_bemerkung": {
"name": "Blattklebetest Feinsortierung Bemerkung",
"description": "Bemerkungen zu den Blattklebetests der Blätter der Feinsortierung.",
"unit": ""
},
"vb_bewertung_grob_prüfer": {
"name": "Visuelle Beurteilung Grobsortierung Prüfer",
"description": "Name des Prüfers der visuellen Beurteilung der Blätter der Grobsortierung.",
"unit": ""
},
"vb_bewertung_grob_bewertung": {
"name": "Visuelle Beurteilung Grobsortierung Bewertung",
"description": "Visuelle Beurteilung der Blätter der Grobsortierung.",
"unit": ""
},
"vb_bewertung_grob_bemerkung": {
"name": "Visuelle Beurteilung Grobsortierung Bemerkung",
"description": "Bemerkungen zu der visuellen Beurteilung der Blätter der Grobsortierung.",
"unit": ""
},
"vb_bewertung_fein_prüfer": {
"name": "Visuelle Beurteilung Feinsortierung Prüfer",
"description": "Name des Prüfers der visuellen Beurteilung der Blätter der Feinsortierung.",
"unit": ""
},
"vb_bewertung_fein_bewertung": {
"name": "Visuelle Beurteilung Feinsortierung Bewertung",
"description": "Visuelle Beurteilung der Blätter der Feinsortierung.",
"unit": ""
},
"vb_bewertung_fein_bemerkung": {
"name": "Visuelle Beurteilung Feinsortierung Bemerkung",
"description": "Bemerkungen zu der visuellen Beurteilung der Blätter der Feinsortierung.",
"unit": ""
},
"trockengehalt_mw_x": {
"name": "Mittelwert Trockengehalt",
"description": "Mittelwert der Doppelbestimmung vom Trockengehalt eines Bestandteils der Probe.",
"unit": "%"
},
"summe_massen": {
"name": "Summe Massen",
"description": "Summe der Massen aller einzelnen Bestandteile",
"unit": "g"
},
"anteil_an_gesamtprobe_x": {
"name": "Anteil an Gesamtprobe",
"description": "Der Anteil eines Bestandteils an der Gesamtprobe, in Bezug auf die Masse.",
"unit": "%"
},
"einwaage_von_50g_otro_x": {
"name": "Einwaage Bestandteile",
"description": "Masse an lufttrockenem Bestandteil, welcher für die Untersuchung verwendet wird.",
"unit": "g"
},
"summe_einwaage_50g-otro": {
"name": "Einwaage Probe",
"description": "Gesamtmasse an Probe die für die Untersuchung verwendet werden muss.",
"unit": "g"
},
"csb_g_pro_kg": {
"name": "CSB [g/kg]",
"description": "Der chemische Sauerstoffbedarf bezogen auf g Sauerstoff pro kg Faserstoff.",
"unit": "g/kg"
},
"csb_prozent": {
"name": "CSB [%]",
"description": "Der chemische Sauerstoffbedarf in Prozent.",
"unit": "%"
},
"ars_rückstand": {
"name": "Abdampfrückstand Rückstand",
"description": "Rückstand, welcher in der Schaale zurückbleibt.",
"unit": "g"
},
"ars_abdampfrückstand": {
"name": "Abdampfrückstand",
"description": "Rückstand bezogen auf die Einwaage.",
"unit": "g"
},
"ars_mittelwert": {
"name": "Abdampfrückstand Mittelwert",
"description": "Mittelwert der Abdampfrückständ",
"unit": "g"
},
"ars_abdampfrückstand_probe_mg_pro_g": {
"name": "Abdampfrückstand Probe [mg/g]",
"description": "Abdampfrückstand abzüglich des ARS von Wasser in mg Abdampfrückstand pro g Faserstoff.",
"unit": "mg/g"
},
"ars_abdampfrückstand_probe_%": {
"name": "Abdampfrückstand Probe [%]",
"description": "Adampfrückstand abzüglich des ARS von Wasser in Prozent.",
"unit": "%"
},
"reject_otro_g": {
"name": "Rückstand [g]",
"description": "Masse des Rückstands der Fraktionierplatte (otro).",
"unit": "g"
},
"reject_otro_%": {
"name": "Rückstand [%]",
"description": "Anteil an Rückstand welches durch die Fraktionierplatte abgetrennt wurde, bezogen auf die verwendete Probenmasse.",
"unit": "g"
},
"sd_rückstand": {
"name": "Stoffdichte Rückstand",
"description": "Differenz zwischen Auswaage und Filterblatt, nach der Trocknung im Blatttrockner.",
"unit": "g"
},
"sd_ergebnis": {
"name": "Stoffdichte",
"description": "Ermittelte Stoffdichte der Faserstoffsuspension nach der Grobsortierung.",
"unit": "%"
},
"sd_rückstand_otro": {
"name": "Stoffdichte Rückstand otro",
"description": "Differenz zwischen Auswaage und Filterblatt nach der Trocknung im Trockenschrank bis zur konstanten Masse.",
"unit": "g"
},
"sd_ergebnis_otro": {
"name": "Stoffdichte otro",
"description": "Ermittelte Stoffdichte der Faserstoffsuspension nach der Grobsortierung, nach Trocknung im Trockenschrank bis zur konstanten Masse.",
"unit": "%"
},
"volumen_blätter": {
"name": "Volumen Blatt",
"description": "Ermittelte Masse an Faserstoffsuspension die nötig ist, um ein Blatt mit einer Grammatur von 1,8 g herzustellen.",
"unit": "g"
},
"feinsortierung_einwaage": {
"name": "Einwaage Feinsortierung",
"description": "Masse an Faserstoffsuspension welche äquivalent zu 20 g otro Faserstoff ist. Wird für die Feinsortierung verwendet.",
"unit": "g"
},
"macrostickies_einwaage": {
"name": "Einwaage Mackrostickies",
"description": "Masse an Faserstoffsuspension welche äquivalent zu [var_macrstickies] Faserstoffsuspension ist. Wird für die Bestimmung von Mackrostickies verwendet.",
"unit": "g"
},
"reject_related_grob_exkl": {
"name": "Rückstand Grob ohne Trockenentfernt",
"description": "Anteil an Grobrückstand., ohne trockenentfernte Komponenten.",
"unit": "%"
},
"reject_related_grob_inkl": {
"name": "Rückstand Grob mit Trockenentfernt",
"description": "Anteil an Grobrückstand., mit trockenentfernte Komponenten.",
"unit": "%"
}
}

2
python-cepi-master/.gitignore vendored Normal file

@@ -0,0 +1,2 @@
*.xlsx
*.exe


@@ -0,0 +1,549 @@
import os
from openpyxl import *
def read(filepath):
def read_datasheet(workbook):
#reset lists
k=[]
m=[]
workbook = load_workbook(filepath, data_only=True)
datasheet = workbook["Data sheet"]
version = datasheet.cell(row=1, column=1).value
#max index
max = 72
if version == "V23.01":
for i in range(max):
k.append(0)
m.append(0)
results["version_datasheet"] = version
results["general data"] = {}
results["product details"] = {}
results["paper based material data"] = {}
results["functional application out"] = {}
results["functional application in"] = {}
results["functional application mid"] = {}
results["printing"] = {}
results["varnish"] = {}
results["glue"] = {}
results["sealing"] = {}
results["non paper based"] = {}
results["other remarks"] = {}
results["date of production"] = {}
results["barrier perfomance out"] = {}
results["barrier perfomance in"] = {}
results["compostability and biodegradability"] = {}
results["general data"]["company name"] = cell(datasheet, 4, 6, 0)
results["general data"]["product name"] = cell(datasheet, 5, 6, 1)
results["general data"]["desription"] = cell(datasheet, 6, 6, 2)
results["general data"]["material/product"] = cell(datasheet, 7, 6, 3)
results["product details"]["width and tolerance"] = cell(datasheet, 10, 6, 4)
results["product details"]["length and tolerance"] = cell(datasheet, 11, 6, 5)
results["product details"]["height and tolerance"] = cell(datasheet, 12, 6, 6)
results["product details"]["weight of product"] = cell(datasheet, 13, 6, 7)
results["product details"]["is it a used product"] = cell(datasheet, 14, 6, 8)
results["paper based material data"]["base material"] = cell(datasheet, 17, 6, 9)
results["paper based material data"]["grammage"] = cell(datasheet, 18, 6, 10)
results["paper based material data"]["thickness"] = cell(datasheet, 19, 6, 11)
results["paper based material data"]["pigments"] = cell(datasheet, 20, 6, 12)
results["paper based material data"]["fillers"] = cell(datasheet, 21, 6, 13)
results["paper based material data"]["wet streght polymers"] = cell(datasheet, 22, 6, 14)
results["paper based material data"]["artificial fibres"] = cell(datasheet, 23, 6, 15)
results["paper based material data"]["printed, varnished, lacquered,..."] = cell(datasheet, 24, 6, 16)
results["paper based material data"]["printing"] = cell(datasheet, 25, 6, 17)
results["paper based material data"]["varnish"] = cell(datasheet, 26, 6, 18)
results["paper based material data"]["adhesive"] = cell(datasheet, 27, 6, 19)
results["paper based material data"]["additional sealing"] = cell(datasheet, 28, 6, 20)
results["paper based material data"]["functional app out"] = cell(datasheet, 29, 6, 21)
results["paper based material data"]["functional app in"] = cell(datasheet, 30, 6, 22)
results["paper based material data"]["functional app mid"] = cell(datasheet, 31, 6, 23)
results["paper based material data"]["addtional information"] = cell(datasheet, 32, 6, 23)
results["functional application out"]["description"] = cell(datasheet, 34, 6, 24)
results["functional application out"]["grammage"] = cell(datasheet, 35, 6, 25)
results["functional application out"]["thickness"] = cell(datasheet, 36, 6, 26)
results["functional application out"]["related area"] = cell(datasheet, 37, 6, 27)
results["functional application in"]["description"] = cell(datasheet, 40, 6, 28)
results["functional application in"]["grammage"] = cell(datasheet, 41, 6, 29)
results["functional application in"]["thickness"] = cell(datasheet, 42, 6, 30)
results["functional application in"]["related area"] = cell(datasheet, 43, 6, 31)
results["functional application mid"]["description"] = cell(datasheet, 46, 6, 32)
results["functional application mid"]["grammage"] = cell(datasheet, 47, 6, 33)
results["functional application mid"]["thickness"] = cell(datasheet, 48, 6, 34)
results["functional application mid"]["related area"] = cell(datasheet, 49, 6, 35)
results["functional application mid"]["overall grammage"] = cell(datasheet, 51, 6, 36)
results["functional application mid"]["overall thickness"] = cell(datasheet, 52, 6, 37)
results["functional application mid"]["additional specs"] = cell(datasheet, 53, 6, 38)
results["printing"]["type description"] = cell(datasheet, 56, 6, 39)
results["printing"]["weight"] = cell(datasheet, 57, 6, 40)
results["printing"]["printing area"] = cell(datasheet, 58, 6, 41)
results["varnish"]["type description"] = cell(datasheet, 61, 6, 42)
results["varnish"]["weight"] = cell(datasheet, 62, 6, 43)
results["varnish"]["varnish area"] = cell(datasheet, 63, 6, 44)
results["glue"]["type description"] = cell(datasheet, 66, 6, 45)
results["glue"]["use description"] = cell(datasheet, 67, 6, 46)
results["glue"]["weight"] = cell(datasheet, 68, 6, 47)
results["sealing"]["type description"] = cell(datasheet, 71, 6, 48)
results["sealing"]["weight"] = cell(datasheet, 72, 6, 49)
results["sealing"]["sealing"] = cell(datasheet, 73, 6, 50)
results["non paper based"]["description"] = cell(datasheet, 76, 6, 51)
results["non paper based"]["weight"] = cell(datasheet, 77, 6, 52)
results["non paper based"]["removability"] = cell(datasheet, 78, 6, 53)
results["other remarks"]["other remarks"] = cell(datasheet, 81, 6, 54)
results["date of production"]["date of production"] = cell(datasheet, 84, 6, 55)
results["barrier perfomance out"]["otr"] = cell(datasheet, 87, 6, 56)
results["barrier perfomance out"]["wvtr"] = cell(datasheet, 88, 6, 57)
results["barrier perfomance out"]["grease/oil/fat"] = cell(datasheet, 89, 6, 58)
results["barrier perfomance out"]["mosh/moah"] = cell(datasheet, 90, 6, 59)
results["barrier perfomance out"]["water barrier"] = cell(datasheet, 91, 6, 60)
results["barrier perfomance out"]["nature of coating"] = cell(datasheet, 92, 6, 61)
results["barrier perfomance in"]["otr"] = cell(datasheet, 95, 6, 62)
results["barrier perfomance in"]["wvtr"] = cell(datasheet, 96, 6, 63)
results["barrier perfomance in"]["grease/oil/fat"] = cell(datasheet, 97, 6, 64)
results["barrier perfomance in"]["mosh/moah"] = cell(datasheet, 98, 6, 65)
results["barrier perfomance in"]["water barrier"] = cell(datasheet, 99, 6, 66)
results["barrier perfomance in"]["nature of coating"] = cell(datasheet, 100, 6, 67)
results["compostability and biodegradability"]["compost"] = cell(datasheet, 103, 6, 68)
results["compostability and biodegradability"]["home compost"] = cell(datasheet, 104, 6, 69)
results["compostability and biodegradability"]["biodeg aqua"] = cell(datasheet, 105, 6, 70)
results["compostability and biodegradability"]["biodeg soil"] = cell(datasheet, 106, 6, 71)
def cell(worksheet, row, column, index):
return str(worksheet.cell(row=row + k[index], column=column + m[index]).value)
def versioncheck(workbook):
ubersicht = workbook["Übersicht"]
columnb = []
for cell in ubersicht["B"]:
columnb.append(cell.value)
#clean list
version = []
for i in columnb:
if i is not None:
version.append(i)
return version[-1]
results = {}
if filepath.endswith('.xlsx'): # make sure this is an Excel file
#load Excelfile from path
workbook = load_workbook(filepath, data_only=True)
#check that the required sheets exist so the read doesn't crash
if "Übersicht" in workbook.sheetnames and "Nasslaborteil" in workbook.sheetnames and "Blattklebetest" in workbook.sheetnames and "visuelle Beurteilung" in workbook.sheetnames:
print(filepath + " vollständig")
else:
#avoid crashing due to wrong .xlsx
return results
version = versioncheck(workbook)
print(version)
max_index = 126
k = []
m = []
def shift_ars2(k,m):
m[62] = 10
m[63] = 10
m[64] = 10
m[65] = -1
m[66] = -1
m[67] = -1
m[68] = 10
m[69] = 10
m[69] = 10
def shift_pH_lf(k,m):
m[71] = 10
m[72] = 10
m[73] = 10
m[74] = 10
#corrects the k and m lists so it matches the bkt and vb of the cepi sheets < V22.8
def correct_bkt_vb_old(k,m):
for i in range(107, 113):
k.append(0)
m.append(0)
k[108] = 5
k[111] = 5
k[109] = 5
k[112] = 5
for i in range(113, 119):
k.append(5)
m.append(0)
k[114] = 10
k[115] = 10
k[117] = 10
k[118] = 10
#vb
for i in range(119, max_index+1):
k.append(0)
m.append(0)
k[120] = -4
k[122] = -4
k[123] = -1
k[125] = -1
k[124] = -5
k[126] = -5
m[121] = -1
m[122] = -1
m[125] = -1
m[126] = -1
if version == "V22.1" or version == "V22.2" or version == "V22.3":
for i in range(0, 62):
#normal till ARS 2
k.append(0)
m.append(0)
for i in range (62, 75):
#shift ARS 2 to 4 to the side so it gets blank
k.append(0)
m.append(50)
for i in range(75, 93):
k.append(-3)
m.append(0)
for i in range(93, 96):
k.append(-1)
m.append(0)
for i in range(96, 107):
k.append(-2)
m.append(0)
correct_bkt_vb_old(k,m)
#verified
elif version == "V22.6":
for i in range(0, 75):
k.append(0)
m.append(0)
shift_ars2(k,m)
shift_pH_lf(k,m)
for i in range(75,93):
k.append(-4)
m.append(0)
for i in range(93,107):
k.append(-2)
m.append(0)
correct_bkt_vb_old(k,m)
#verified
elif version == "V22.7":
for i in range(0, 107):
k.append(0)
m.append(0)
for i in range(75, 93):
k[i] = -2
m[62] = 10
m[63] = 10
m[64] = 10
m[65] = -1
m[66] = -1
m[67] = -1
correct_bkt_vb_old(k,m)
#verified
elif version == "V22.8" or version == "V22.9":
for i in range(0, max_index+1):
k.append(0)
m.append(0)
#shift ARS
m[62] = 10
m[63] = 10
m[64] = 10
m[65] = -1
m[66] = -1
m[67] = -1
for i in range(75, 93):
k[i] = -2
#verified
elif version =="V23.02" or version == "V23.04" or version == "V23.05":
for i in range(0, max_index+1):
k.append(0)
m.append(0)
for i in range(75, 93):
k[i] = -1
for i in range(71, 75):
k[i] = 1
for i in range(93, 107):
k[i] = 1
m[73] = 1
m[74] = 1
#verified
#If version not supported
else:
version = ""
print("Version nicht unterstützt")
return results
#load all sheets
ubersicht = workbook["Übersicht"]
nasslabor = workbook["Nasslaborteil"]
blattklebetest = workbook["Blattklebetest"]
visual = workbook["visuelle Beurteilung"]
#create keys
results["version"] = {}
results["übersicht"] = {}
results["nasslaborteil"] = {}
results["nasslaborteil_grob"] = {}
results["nasslaborteil_fein"] = {}
results["blattklebetest"] = {}
results["visuelle beurteilung"] = {}
#start the read
results["version"]["version"] = version
#first page
results["übersicht"]["probeneingang_datum"] = cell(ubersicht, 3, 3, 0)
results["übersicht"]["probeneingang_name"] = cell(ubersicht, 3, 4, 1)
results["übersicht"]["probenvorbereitung_datum"] = cell(ubersicht, 4, 3, 2)
results["übersicht"]["probenvorbereitung_name"] = cell(ubersicht, 4, 4, 3)
results["übersicht"]["probenuntersuchung_datum"] = cell(ubersicht, 6, 3, 4)
results["übersicht"]["probenuntersuchung_name"] = cell(ubersicht, 6, 4, 5)
results["übersicht"]["auftrag"] = cell(ubersicht, 8, 3, 6)
results["übersicht"]["labornummer"] = cell(ubersicht, 7, 3, 7)
results["übersicht"]["firma"] = cell(ubersicht, 9, 3, 8)
prob_nr = ubersicht.cell(row=10, column=3).value
prob_bez = ubersicht.cell(row=10, column=4).value
if prob_nr is None:
prob_nr = " "
if prob_bez is None:
prob_bez = " "
results["übersicht"]["bezeichnung"] = prob_nr + " | " + prob_bez
#second page
results["nasslaborteil"]["bestandteil_1"] = cell(nasslabor, 15, 2, 10)
results["nasslaborteil"]["bestandteil_2"] = cell(nasslabor, 16, 2, 11)
results["nasslaborteil"]["bestandteil_3"] = cell(nasslabor, 17, 2, 12)
results["nasslaborteil"]["bestandteil_4"] = cell(nasslabor, 18, 2, 13)
results["nasslaborteil"]["bestandteil_5"] = cell(nasslabor, 19, 2, 14)
results["nasslaborteil"]["bestandteil_6"] = cell(nasslabor, 20, 2, 15)
results["nasslaborteil"]["bestandteil_7"] = cell(nasslabor, 21, 2, 16)
results["nasslaborteil"]["bestandteil_8"] = cell(nasslabor, 22, 2, 17)
#not pretty, but it happens rarely
results["nasslaborteil"]["masse_1"] = cell(nasslabor, 15, 4, 18)
results["nasslaborteil"]["masse_2"] = cell(nasslabor, 16, 4, 19)
results["nasslaborteil"]["masse_3"] = cell(nasslabor, 17, 4, 20)
results["nasslaborteil"]["masse_4"] = cell(nasslabor, 18, 4, 21)
results["nasslaborteil"]["masse_5"] = cell(nasslabor, 19, 4, 22)
results["nasslaborteil"]["masse_6"] = cell(nasslabor, 20, 4, 23)
results["nasslaborteil"]["masse_7"] = cell(nasslabor, 21, 4, 24)
results["nasslaborteil"]["masse_8"] = cell(nasslabor, 22, 4, 25)
results["nasslaborteil"]["trocken_entfernt_1"] = cell(nasslabor, 15, 6, 26)
results["nasslaborteil"]["trocken_entfernt_2"] = cell(nasslabor, 16, 6, 27)
results["nasslaborteil"]["trocken_entfernt_3"] = cell(nasslabor, 17, 6, 28)
results["nasslaborteil"]["trocken_entfernt_4"] = cell(nasslabor, 18, 6, 29)
results["nasslaborteil"]["trocken_entfernt_5"] = cell(nasslabor, 19, 6, 30)
results["nasslaborteil"]["trocken_entfernt_6"] = cell(nasslabor, 20, 6, 31)
results["nasslaborteil"]["trocken_entfernt_7"] = cell(nasslabor, 21, 6, 32)
results["nasslaborteil"]["trocken_entfernt_8"] = cell(nasslabor, 22, 6, 33)
results["nasslaborteil"]["tg_1_1"] = cell(nasslabor, 29, 3, 34)
results["nasslaborteil"]["tg_1_2"] = cell(nasslabor, 30, 3, 35)
results["nasslaborteil"]["tg_1_3"] = cell(nasslabor, 31, 3, 36)
results["nasslaborteil"]["tg_1_4"] = cell(nasslabor, 32, 3, 37)
results["nasslaborteil"]["tg_1_5"] = cell(nasslabor, 33, 3, 38)
results["nasslaborteil"]["tg_1_6"] = cell(nasslabor, 34, 3, 39)
results["nasslaborteil"]["tg_1_7"] = cell(nasslabor, 35, 3, 40)
results["nasslaborteil"]["tg_1_8"] = cell(nasslabor, 36, 3, 41)
results["nasslaborteil"]["tg_2_1"] = cell(nasslabor, 29, 4, 42)
results["nasslaborteil"]["tg_2_2"] = cell(nasslabor, 30, 4, 43)
results["nasslaborteil"]["tg_2_3"] = cell(nasslabor, 31, 4, 44)
results["nasslaborteil"]["tg_2_4"] = cell(nasslabor, 32, 4, 45)
results["nasslaborteil"]["tg_2_5"] = cell(nasslabor, 33, 4, 46)
results["nasslaborteil"]["tg_2_6"] = cell(nasslabor, 34, 4, 47)
results["nasslaborteil"]["tg_2_7"] = cell(nasslabor, 35, 4, 48)
results["nasslaborteil"]["tg_2_8"] = cell(nasslabor, 36, 4, 49)
results["nasslaborteil"]["einwaage"] = cell(nasslabor, 23, 8, 50)
results["nasslaborteil"]["gerätetyp"] = cell(nasslabor, 41, 3, 51)
results["nasslaborteil"]["labor"] = cell(nasslabor, 42, 3, 52)
results["nasslaborteil"]["zerfaserungsdauer"] = cell(nasslabor, 43, 3, 53)
results["nasslaborteil"]["umdrehungen"] = cell(nasslabor, 44, 3, 54)
results["nasslaborteil"]["bemerkung"] = cell(nasslabor, 45, 3, 55)
results["nasslaborteil"]["csb_verdünnung"] = cell(nasslabor, 59, 4, 56)
results["nasslaborteil"]["csb_messbereich"] = cell(nasslabor, 60, 4, 57)
results["nasslaborteil"]["csb_messwert"] = cell(nasslabor, 61, 4, 58)
results["nasslaborteil"]["ars_filtrat1_einwaage"] = cell(nasslabor, 70, 3, 59)
results["nasslaborteil"]["ars_filtrat1_masse_schale"] = cell(nasslabor, 71, 3, 60)
results["nasslaborteil"]["ars_filtrat1_auswaage"] = cell(nasslabor, 72, 3, 61)
results["nasslaborteil"]["ars_filtrat2_einwaage"] = cell(nasslabor, 70, 4, 62)
results["nasslaborteil"]["ars_filtrat2_masse_schale"] = cell(nasslabor, 71, 4, 63)
results["nasslaborteil"]["ars_filtrat2_auswaage"] = cell(nasslabor, 72, 4, 64)
results["nasslaborteil"]["ars_wasser1_einwaage"] = cell(nasslabor, 70, 5, 65)
results["nasslaborteil"]["ars_wasser1_masse_schale"] = cell(nasslabor, 71, 5, 66)
results["nasslaborteil"]["ars_wasser1_auswaage"] = cell(nasslabor, 72, 5, 67)
results["nasslaborteil"]["ars_wasser2_einwaage"] = cell(nasslabor, 70, 6, 68)
results["nasslaborteil"]["ars_wasser2_masse_schale"] = cell(nasslabor, 71, 6, 69)
results["nasslaborteil"]["ars_wasser2_auswaage"] = cell(nasslabor, 72, 6, 70)
results["nasslaborteil"]["ph_filtrat"] = cell(nasslabor, 77, 3, 71)
results["nasslaborteil"]["lf_filtrat"] = cell(nasslabor, 78, 3, 72)
results["nasslaborteil"]["ph_wasser"] = cell(nasslabor, 77, 4, 73)
results["nasslaborteil"]["lf_wasser"] = cell(nasslabor, 78, 4, 74)
results["nasslaborteil_grob"]["labor"] = cell(nasslabor, 87, 3, 75)
results["nasslaborteil_grob"]["reject_filtergewicht_grob"] = cell(nasslabor, 99, 3, 76)
results["nasslaborteil_grob"]["reject_auswaage_grob"] = cell(nasslabor, 100, 3, 77)
results["nasslaborteil_grob"]["faser-stippen-rückstand"] = cell(nasslabor, 108, 3, 78)
results["nasslaborteil_grob"]["faser-stippen-rückstand_art-größe"] = cell(nasslabor, 109, 3, 79)
results["nasslaborteil_grob"]["papierfremde-bestandteile"] = cell(nasslabor, 111, 3, 80)
results["nasslaborteil_grob"]["papierfremde-art-größe"] = cell(nasslabor, 112, 3, 81)
results["nasslaborteil_grob"]["papierfremde-zerkleinerung"] = cell(nasslabor, 113, 3, 82)
results["nasslaborteil_grob"]["papierfremde-material"] = cell(nasslabor, 114, 3, 83)
results["nasslaborteil_grob"]["sd_filterblatt"] = cell(nasslabor, 117, 3, 84)
results["nasslaborteil_grob"]["sd_einwaage"] = cell(nasslabor, 118, 3, 85)
results["nasslaborteil_grob"]["sd_auswaage"] = cell(nasslabor, 119, 3, 86)
results["nasslaborteil_grob"]["sd_auswaage_ts"] = cell(nasslabor, 126, 3, 87)
results["nasslaborteil_grob"]["volumen_ac1"] = cell(nasslabor, 131, 3, 88)
results["nasslaborteil_grob"]["grammatur_ac1"] = cell(nasslabor, 131, 4, 89)
results["nasslaborteil_grob"]["grammatur_ac2"] = cell(nasslabor, 132, 4, 90)
results["nasslaborteil_grob"]["grammatur_ac3"] = cell(nasslabor, 133, 4, 91)
results["nasslaborteil_grob"]["grammatur_ac4"] = cell(nasslabor, 134, 4, 92)
results["nasslaborteil_fein"]["labor"] = cell(nasslabor, 143, 3, 93)
results["nasslaborteil_fein"]["reject_filtergewicht_fein"] = cell(nasslabor, 155, 3, 94)
results["nasslaborteil_fein"]["reject_auswaage_fein"] = cell(nasslabor, 156, 3, 95)
results["nasslaborteil_fein"]["faser-stippen-rückstand"] = cell(nasslabor, 163, 3, 96)
results["nasslaborteil_fein"]["faser-stippen-rückstand_art-größe"] = cell(nasslabor, 164, 3, 97)
results["nasslaborteil_fein"]["papierfremde-bestandteile"] = cell(nasslabor, 166, 3, 98)
results["nasslaborteil_fein"]["papierfremde-art-größe"] = cell(nasslabor, 167, 3, 99)
results["nasslaborteil_fein"]["papierfremde-zerkleinerung"] = cell(nasslabor, 168, 3, 100)
results["nasslaborteil_fein"]["papierfremde-material"] = cell(nasslabor, 169, 3, 101)
results["nasslaborteil_fein"]["volumen_af1"] = cell(nasslabor, 172, 3, 102)
results["nasslaborteil_fein"]["grammatur_af1"] = cell(nasslabor, 172, 4, 103)
results["nasslaborteil_fein"]["grammatur_af2"] = cell(nasslabor, 173, 4, 104)
results["nasslaborteil_fein"]["grammatur_af3"] = cell(nasslabor, 174, 4, 105)
results["nasslaborteil_fein"]["grammatur_af4"] = cell(nasslabor, 175, 4, 106)
results["blattklebetest"]["bewertung_grob_1_prüfer"] = cell(blattklebetest, 11, 3, 107)
results["blattklebetest"]["bewertung_grob_1_bewertung"] = cell(blattklebetest, 17, 3, 108)
results["blattklebetest"]["bewertung_grob_1_bemerkung"] = cell(blattklebetest, 18, 3, 109)
results["blattklebetest"]["bewertung_grob_ges_prüfer"] = cell(blattklebetest, 11, 5, 110)
results["blattklebetest"]["bewertung_grob_ges_bewertung"] = cell(blattklebetest, 17, 5, 111)
results["blattklebetest"]["bewertung_grob_ges_bemerkung"] = cell(blattklebetest, 18, 5, 112)
results["blattklebetest"]["bewertung_fein_1_prüfer"] = cell(blattklebetest, 21, 3, 113)
results["blattklebetest"]["bewertung_fein_1_bewertung"] = cell(blattklebetest, 27, 3, 114)
results["blattklebetest"]["bewertung_fein_1_bemerkung"] = cell(blattklebetest, 28, 3, 115)
results["blattklebetest"]["bewertung_fein_ges_prüfer"] = cell(blattklebetest, 21, 5, 116)
results["blattklebetest"]["bewertung_fein_ges_bewertung"] = cell(blattklebetest, 27, 5, 117)
results["blattklebetest"]["bewertung_fein_ges_bemerkung"] = cell(blattklebetest, 28, 5, 118)
results["visuelle beurteilung"]["bewertung_grob_1_prüfer"] = cell(visual, 11, 3, 119)
results["visuelle beurteilung"]["bewertung_grob_1_bewertung"] = cell(visual, 20, 3, 120)
results["visuelle beurteilung"]["bewertung_grob_ges_prüfer"] = cell(visual, 11, 6, 121)
results["visuelle beurteilung"]["bewertung_grob_ges_bewertung"] = cell(visual, 20, 6, 122)
results["visuelle beurteilung"]["bewertung_fein_1_prüfer"] = cell(visual, 23, 3, 123)
results["visuelle beurteilung"]["bewertung_fein_1_bewertung"] = cell(visual, 32, 3, 124)
results["visuelle beurteilung"]["bewertung_fein_ges_prüfer"] = cell(visual, 23, 6, 125)
results["visuelle beurteilung"]["bewertung_fein_ges_bewertung"] = cell(visual, 32, 6, 126)
#insert dummy values for pH & lf for older versions
if version == "V22.1" or version == "V22.2" or version == "V22.3" or version == "V22.6":
results["nasslaborteil"]["ph_filtrat"] = 0
results["nasslaborteil"]["lf_filtrat"] = 0
results["nasslaborteil"]["ph_wasser"] = 0
results["nasslaborteil"]["lf_wasser"] = 0
#TODO add a CSB dummy value as well
if "Data sheet" in workbook.sheetnames:
#only enable this call for testing, or once the datasheet import is implemented in the database
read_datasheet(workbook)
return results
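# Reads every workbook in the given directory and returns one result dict per file.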
def read_all(path):
data = []
for filename in os.listdir(path):
data.append(read(os.path.join(path, filename)))
return data
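# Sanity check against a test workbook whose tracked cells all contain "true";
# any other value presumably means a version offset in k/m points at the wrong cell.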
def sheet_test(path):
list = read(path)
for value in list["übersicht"].values():
if value != "true":
print("Error Übersicht!")
for value in list["nasslaborteil"].values():
if value != "true":
print("Error Nasslaborteil!")
for value in list["nasslaborteil_grob"].values():
if value != "true":
print("Error Nasslaborteil_grob!")
for value in list["nasslaborteil_fein"].values():
if value != "true":
print("Error Nasslaborteil_fein!")
for value in list["blattklebetest"].values():
if value != "true":
print("Error BKT!")
for value in list["visuelle beurteilung"].values():
if value != "true":
print("Error VB!")
x = input("Full output? J/N: ")
if x == "J" or x == "j":
print(list)
else:
print("ok.")
if __name__ == "__main__":
print(read("C:/Users/T.Priebe/Documents/3/ExcelCepi/examples/CEPI-method_Laufzettel_Part I_V23.07.xlsx"))

View file

@ -0,0 +1,240 @@
import os
from openpyxl import *
from openpyxl.styles import Font
from openpyxl.styles import numbers
from openpyxl.styles import Alignment
import tkinter as tk
from tkinter import filedialog
from tkinter import messagebox
from tkinter import font
from tkinter import ttk
import customtkinter as ctk
input_dir = ""
output_dir = ""
def unify_data(data):
if isinstance(data, float) or isinstance(data, int):
return float(data)
else:
return 0.0
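# Collects key figures from every .xlsx in the input directory by scanning the "Nasslaborteil"
# sheet for known label cells (e.g. "pH-Wert", "Leitfähigkeit", "Messwert [mg/l]") and reading
# the value next to them, then hands the collected results to write_data().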
def read_data(input, output, save_name):
results = {}
n=len(os.listdir(input))
i=0
for filename in os.listdir(input):
if filename.endswith('.xlsx'): # make sure this is an Excel file
filepath = os.path.join(input, filename)
workbook = load_workbook(filepath, data_only=True)
if "Nasslaborteil" in workbook.sheetnames:
print("vorhanden " + filename)
else:
print("nicht vorhanden " + filename)
continue
worksheet = workbook["Nasslaborteil"]
results[filename] = {}
# print(filename)
column_index = 2 # Spalte B
for row in worksheet.iter_rows():
for cell in row:
if cell.value == "Probennr. und -bezeichnung":
displayname = worksheet.cell(row=cell.row, column=column_index + 2)
results[filename]["Displayname"] = displayname.value
break
if cell.value == 'Abdampfrückstand Probe [%]':
ars = worksheet.cell(row=cell.row, column=column_index + 1)
print(type(ars.value))
results[filename]["Abdampfrückstand"] = unify_data(ars.value)
print(unify_data(ars.value))
break
if cell.value == "Messwert [mg/l]":
csb = worksheet.cell(row=cell.row, column=column_index + 2)
results[filename]["CSB"] = unify_data(csb.value)
break
if cell.value == "pH-Wert":
ph = worksheet.cell(row=cell.row, column=column_index + 1)
results[filename]["pH-Wert"] = unify_data(ph.value)
break
if cell.value == "Leitfähigkeit":
lf = worksheet.cell(row=cell.row, column=column_index + 1)
results[filename]["Leitfähigkeit"] = unify_data(lf.value)
break
if cell.value == "Rückstand Grob incl. dry removed components":
coarse = worksheet.cell(row=cell.row, column=column_index + 1)
results[filename]["Coarse Reject"] = unify_data(coarse.value)
break
if cell.value == "fine reject (related to total product)" or cell.value == "Rückstandsanteil an Einwaage otro [%]":
fine = worksheet.cell(row=cell.row, column=column_index + 1)
results[filename]["Fine Reject"] = unify_data(fine.value)
break
#load the Blattklebetest sheet and collect both evaluation values (coarse and fine) in a list
worksheet_bkt = workbook["Blattklebetest"]
bkt = []
for row in worksheet_bkt.iter_rows():
for cell in row:
if cell.value == "Evaluation":
bkt.append(worksheet_bkt.cell(row=cell.row, column=column_index + 3).value)
#use a separate loop variable so the file counter i is not clobbered
for j in range(len(bkt)):
results[filename]["Blattklebetest " + str(j)] = bkt[j]
worksheet_vb = workbook["visuelle Beurteilung"]
vb = []
for row in worksheet_vb.iter_rows():
for cell in row:
if cell.value == "Evaluation":
vb.append(worksheet_vb.cell(row=cell.row, column=column_index + 4).value)
for j in range(len(vb)):
results[filename]["Visuelle Beurteilung " + str(j)] = vb[j]
#this customtkinter variant defines neither `progress` nor `root`;
#just refresh the app window here to keep the UI responsive
app.update()
i+=1
print("###")
print(results)
print("###")
write_data(output, results, save_name)
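# Writes the summary workbook: one column per input file, one row per entry in `col`,
# with the value rows 3-6 formatted as percentages and all cells centered.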
def write_data(output, data, save_name):
workbook = Workbook()
sheet = workbook.active
filenames = list(data.keys())
col = ["Probe", "Substrat", "Coarse Reject", "Fine Reject", "Reject Gesamt", "Abdampfrückstand", "CSB", "pH- Wert", "Leitfähigkeit", "Blattklebetest Grob", "Blattklebetest Fein", "Visuelle Beurteilung Grob", "Visuelle Beurteilung Fein"]
bold_font = Font(bold=True)
for i in range(len(col)):
sheet.cell(row=i+1, column=1, value=col[i])
sheet.cell(row=i+1, column=1).font = bold_font
for i in range(len(filenames)):
sheet.cell(row=1, column=i+2, value=data[filenames[i]].get("Displayname"))
sheet.cell(row=1, column=i+2).font = bold_font
sheet.cell(row=2, column=i+2, value="?")
sheet.cell(row=3, column=i+2, value=data[filenames[i]].get("Coarse Reject"))
sheet.cell(row=4, column=i+2, value=data[filenames[i]].get("Fine Reject"))
sheet.cell(row=5, column=i+2, value=float(data[filenames[i]].get("Coarse Reject")) + float(data[filenames[i]].get("Fine Reject")))
sheet.cell(row=6, column=i+2, value=data[filenames[i]].get("Abdampfrückstand"))
sheet.cell(row=7, column=i+2, value=data[filenames[i]].get("CSB"))
sheet.cell(row=8, column=i+2, value=data[filenames[i]].get("pH-Wert"))
sheet.cell(row=9, column=i+2, value=data[filenames[i]].get("Leitfähigkeit"))
sheet.cell(row=10, column=i+2, value=data[filenames[i]].get("Blattklebetest 0"))
sheet.cell(row=11, column=i+2, value=data[filenames[i]].get("Blattklebetest 1"))
sheet.cell(row=12, column=i+2, value=data[filenames[i]].get("Visuelle Beurteilung 0"))
sheet.cell(row=13, column=i+2, value=data[filenames[i]].get("Visuelle Beurteilung 1"))
# Auto-fit column widths
for col in sheet.columns:
max_length = 0
column = col[0].column_letter # Get the column name
for cell in col:
try:
# Get the length of the value as a string
cell_value = str(cell.value)
if len(cell_value) > max_length:
max_length = len(cell_value)
except:
pass
adjusted_width = (max_length + 2)
sheet.column_dimensions[column].width = adjusted_width
# format the value rows as percentages
# iter the rows
for row in range(3, 7):
# iter the cells
for cell in sheet[row]:
cell.number_format = numbers.FORMAT_PERCENTAGE_00
centered_style = Alignment(horizontal='center', vertical='center')
#center all
for row in sheet.iter_rows():
for cell in row:
cell.alignment = centered_style
workbook.save(output + "/" + save_name + ".xlsx")
def directory_in():
global input_dir
input_dir = filedialog.askdirectory()
print(input_dir)
def directory_out():
global output_dir
output_dir = filedialog.askdirectory()
print(output_dir)
def save_data():
text_input = input_field.get()
print(input_dir)
print(output_dir)
print(text_input)
if text_input == "" or input_dir == "" or output_dir == "":
if input_dir == "" or output_dir == "":
messagebox.showerror(title="Directory Error", message="No Directory found")
return
elif text_input == "":
messagebox.showerror(title="File Error", message="Invalid Filename")
return
else:
messagebox.showerror(title="Unknown Error", message="Unknown Error")
else:
if os.path.isfile(output_dir+"/"+text_input+".xlsx"):
messagebox.showwarning(title="Attention!", message="File " + text_input + " already exists in " + output_dir)
else:
read_data(input_dir, output_dir, text_input)
os.startfile(output_dir)
# Mainprogram
ctk.set_appearance_mode("Dark") # Modes: "System" (standard), "Dark", "Light"
ctk.set_default_color_theme("green") # Themes: "blue" (standard), "green", "dark-blue"
app = ctk.CTk()
app.geometry("320x300")
app.title("Summarize CEPI")
frame_1 = ctk.CTkFrame(master=app)
frame_1.pack(pady=10, padx=10, fill="both", expand=True)
label_1 = ctk.CTkLabel(master=frame_1, justify=ctk.LEFT, text="Summarize CEPI-Sheets (V22.4 and up)")
label_1.pack(pady=10, padx=10)
frame_2 = ctk.CTkFrame(master=frame_1)
frame_2.pack(fill="both", expand=True)
button_1 = ctk.CTkButton(master=frame_2, command=directory_in, text="Data Input", width=150)
button_1.pack(pady=10, padx=10)
button_2 = ctk.CTkButton(master=frame_2, command=directory_out, text="Data Output", width=150)
button_2.pack(pady=10, padx=10)
entry_1 = ctk.CTkEntry(master=frame_2, placeholder_text="Enter Filename", width=150)
entry_1.pack(pady=10, padx=10)
frame_3 = ctk.CTkFrame(master=frame_2)
frame_3.pack(fill="both", expand=True)
button_3 = ctk.CTkButton(master=frame_3, command=save_data, text="Start Summarize", width=150)
button_3.pack(pady=10, padx=10)
def update_progress(progress, value, total):
"""Aktualisiert den Fortschritt der Fortschrittsleiste."""
progress['value'] = int(value / total * 100)
app.mainloop()

View file

@ -0,0 +1,241 @@
import os
from openpyxl import *
from openpyxl.styles import Font
from openpyxl.styles import numbers
from openpyxl.styles import Alignment
import tkinter as tk
from tkinter import filedialog
from tkinter import messagebox
from tkinter import font
from tkinter import ttk
input_dir = ""
output_dir = ""
def unify_data(data):
if isinstance(data, float) or isinstance(data, int):
return float(data)
else:
return 0.0
def read_data(input, output, save_name):
results = {}
n=len(os.listdir(input))
i=0
for filename in os.listdir(input):
if filename.endswith('.xlsx'): # make sure this is an Excel file
filepath = os.path.join(input, filename)
workbook = load_workbook(filepath, data_only=True)
if "Nasslaborteil" in workbook.sheetnames:
print("vorhanden " + filename)
else:
print("nicht vorhanden " + filename)
continue
worksheet = workbook["Nasslaborteil"]
results[filename] = {}
# print(filename)
column_index = 2 # Spalte B
for row in worksheet.iter_rows():
for cell in row:
if cell.value == "Probennr. und -bezeichnung":
displayname = worksheet.cell(row=cell.row, column=column_index + 2)
results[filename]["Displayname"] = displayname.value
break
if cell.value == 'Abdampfrückstand Probe [%]':
ars = worksheet.cell(row=cell.row, column=column_index + 1)
print(type(ars.value))
results[filename]["Abdampfrückstand"] = unify_data(ars.value)
print(unify_data(ars.value))
break
if cell.value == "Messwert [mg/l]":
csb = worksheet.cell(row=cell.row, column=column_index + 2)
results[filename]["CSB"] = unify_data(csb.value)
break
if cell.value == "pH-Wert":
ph = worksheet.cell(row=cell.row, column=column_index + 1)
results[filename]["pH-Wert"] = unify_data(ph.value)
break
if cell.value == "Leitfähigkeit":
lf = worksheet.cell(row=cell.row, column=column_index + 1)
results[filename]["Leitfähigkeit"] = unify_data(lf.value)
break
if cell.value == "Rückstand Grob incl. dry removed components":
coarse = worksheet.cell(row=cell.row, column=column_index + 1)
results[filename]["Coarse Reject"] = unify_data(coarse.value)
break
if cell.value == "fine reject (related to total product)" or cell.value == "Rückstandsanteil an Einwaage otro [%]":
fine = worksheet.cell(row=cell.row, column=column_index + 1)
results[filename]["Fine Reject"] = unify_data(fine.value)
break
#load the Blattklebetest sheet and collect both evaluation values (coarse and fine) in a list
worksheet_bkt = workbook["Blattklebetest"]
bkt = []
for row in worksheet_bkt.iter_rows():
for cell in row:
if cell.value == "Evaluation":
bkt.append(worksheet_bkt.cell(row=cell.row, column=column_index + 3).value)
#use a separate loop variable so the file counter i is not clobbered
for j in range(len(bkt)):
results[filename]["Blattklebetest " + str(j)] = bkt[j]
worksheet_vb = workbook["visuelle Beurteilung"]
vb = []
for row in worksheet_vb.iter_rows():
for cell in row:
if cell.value == "Evaluation":
vb.append(worksheet_vb.cell(row=cell.row, column=column_index + 4).value)
for j in range(len(vb)):
results[filename]["Visuelle Beurteilung " + str(j)] = vb[j]
update_progress(progress, i+1, n)
root.update()
i+=1
print("###")
print(results)
print("###")
write_data(output, results, save_name)
def write_data(output, data, save_name):
workbook = Workbook()
sheet = workbook.active
filenames = list(data.keys())
col = ["Probe", "Substrat", "Coarse Reject", "Fine Reject", "Reject Gesamt", "Abdampfrückstand", "CSB", "pH- Wert", "Leitfähigkeit", "Blattklebetest Grob", "Blattklebetest Fein", "Visuelle Beurteilung Grob", "Visuelle Beurteilung Fein"]
bold_font = Font(bold=True)
for i in range(len(col)):
sheet.cell(row=i+1, column=1, value=col[i])
sheet.cell(row=i+1, column=1).font = bold_font
for i in range(len(filenames)):
sheet.cell(row=1, column=i+2, value=data[filenames[i]].get("Displayname"))
sheet.cell(row=1, column=i+2).font = bold_font
sheet.cell(row=2, column=i+2, value="?")
sheet.cell(row=3, column=i+2, value=data[filenames[i]].get("Coarse Reject"))
sheet.cell(row=4, column=i+2, value=data[filenames[i]].get("Fine Reject"))
sheet.cell(row=5, column=i+2, value=float(data[filenames[i]].get("Coarse Reject")) + float(data[filenames[i]].get("Fine Reject")))
sheet.cell(row=6, column=i+2, value=data[filenames[i]].get("Abdampfrückstand"))
sheet.cell(row=7, column=i+2, value=data[filenames[i]].get("CSB"))
sheet.cell(row=8, column=i+2, value=data[filenames[i]].get("pH-Wert"))
sheet.cell(row=9, column=i+2, value=data[filenames[i]].get("Leitfähigkeit"))
sheet.cell(row=10, column=i+2, value=data[filenames[i]].get("Blattklebetest 0"))
sheet.cell(row=11, column=i+2, value=data[filenames[i]].get("Blattklebetest 1"))
sheet.cell(row=12, column=i+2, value=data[filenames[i]].get("Visuelle Beurteilung 0"))
sheet.cell(row=13, column=i+2, value=data[filenames[i]].get("Visuelle Beurteilung 1"))
# Auto-fit column widths
for col in sheet.columns:
max_length = 0
column = col[0].column_letter # Get the column name
for cell in col:
try:
# Get the length of the value as a string
cell_value = str(cell.value)
if len(cell_value) > max_length:
max_length = len(cell_value)
except:
pass
adjusted_width = (max_length + 2)
sheet.column_dimensions[column].width = adjusted_width
# format the value rows as percentages
# iter the rows
for row in range(3, 7):
# iter the cells
for cell in sheet[row]:
cell.number_format = numbers.FORMAT_PERCENTAGE_00
centered_style = Alignment(horizontal='center', vertical='center')
#center all
for row in sheet.iter_rows():
for cell in row:
cell.alignment = centered_style
workbook.save(output + "/" + save_name + ".xlsx")
def directory_in():
global input_dir
input_dir = filedialog.askdirectory()
print(input_dir)
def directory_out():
global output_dir
output_dir = filedialog.askdirectory()
print(output_dir)
def save_data():
text_input = input_field.get()
print(input_dir)
print(output_dir)
print(text_input)
if text_input == "" or input_dir == "" or output_dir == "":
if input_dir == "" or output_dir == "":
messagebox.showerror(title="Directory Error", message="No Directory found")
return
elif text_input == "":
messagebox.showerror(title="File Error", message="Invalid Filename")
return
else:
messagebox.showerror(title="Unknown Error", message="Unknown Error")
else:
if os.path.isfile(output_dir+"/"+text_input+".xlsx"):
messagebox.showwarning(title="Attention!", message="File " + text_input + " already exists in " + output_dir)
else:
read_data(input_dir, output_dir, text_input)
os.startfile(output_dir)
# Mainprogram
root = tk.Tk()
root.geometry("300x250")
root.title("CEPI-Summarize")
root.resizable(width=False, height=False)
head_font = ("Helvetica", 10, "bold")
head_label = tk.Label(root, text="Summarize Multiple CEPI-Datasheets")
head_label.pack(pady=10)
head_label.configure(font=head_font)
frame = tk.Frame(root)
frame.pack()
directory_button = tk.Button(frame, text="Input Data", command=directory_in, width=10)
directory_button.pack(pady=5, side=tk.LEFT, padx=3)
directory_button_out = tk.Button(frame, text="Output Data", command=directory_out, width=10)
directory_button_out.pack(pady=5, side=tk.RIGHT, padx=3)
sub_label = tk.Label(root, text="Enter Filename of Summary")
sub_label.pack(pady=5)
frame2 = tk.Frame(root)
frame2.pack()
input_field = tk.Entry(frame2, width=17)
input_field.pack(pady=5, side=tk.LEFT)
xlsxlabel = tk.Label(frame2, text=".xlsx")
xlsxlabel.pack(side=tk.RIGHT)
save_button = tk.Button(root, text="Resumir", command=save_data, width=10)
save_button.pack(pady=10)
progress = ttk.Progressbar(root, orient="horizontal", length=200, mode="determinate")
progress.pack(pady=10)
def update_progress(progress, value, total):
"""Aktualisiert den Fortschritt der Fortschrittsleiste."""
progress['value'] = int(value / total * 100)
root.mainloop()

View file

@ -0,0 +1,164 @@
import tkinter
import tkinter.messagebox
import customtkinter
customtkinter.set_appearance_mode("System") # Modes: "System" (standard), "Dark", "Light"
customtkinter.set_default_color_theme("blue") # Themes: "blue" (standard), "green", "dark-blue"
class App(customtkinter.CTk):
def __init__(self):
super().__init__()
# configure window
self.title("CustomTkinter complex_example.py")
self.geometry(f"{1100}x{580}")
# configure grid layout (4x4)
self.grid_columnconfigure(1, weight=1)
self.grid_columnconfigure((2, 3), weight=0)
self.grid_rowconfigure((0, 1, 2), weight=1)
# create sidebar frame with widgets
self.sidebar_frame = customtkinter.CTkFrame(self, width=140, corner_radius=0)
self.sidebar_frame.grid(row=0, column=0, rowspan=4, sticky="nsew")
self.sidebar_frame.grid_rowconfigure(4, weight=1)
self.logo_label = customtkinter.CTkLabel(self.sidebar_frame, text="CustomTkinter", font=customtkinter.CTkFont(size=20, weight="bold"))
self.logo_label.grid(row=0, column=0, padx=20, pady=(20, 10))
self.sidebar_button_1 = customtkinter.CTkButton(self.sidebar_frame, command=self.sidebar_button_event)
self.sidebar_button_1.grid(row=1, column=0, padx=20, pady=10)
self.sidebar_button_2 = customtkinter.CTkButton(self.sidebar_frame, command=self.sidebar_button_event)
self.sidebar_button_2.grid(row=2, column=0, padx=20, pady=10)
self.sidebar_button_3 = customtkinter.CTkButton(self.sidebar_frame, command=self.sidebar_button_event)
self.sidebar_button_3.grid(row=3, column=0, padx=20, pady=10)
self.appearance_mode_label = customtkinter.CTkLabel(self.sidebar_frame, text="Appearance Mode:", anchor="w")
self.appearance_mode_label.grid(row=5, column=0, padx=20, pady=(10, 0))
self.appearance_mode_optionemenu = customtkinter.CTkOptionMenu(self.sidebar_frame, values=["Light", "Dark", "System"],
command=self.change_appearance_mode_event)
self.appearance_mode_optionemenu.grid(row=6, column=0, padx=20, pady=(10, 10))
self.scaling_label = customtkinter.CTkLabel(self.sidebar_frame, text="UI Scaling:", anchor="w")
self.scaling_label.grid(row=7, column=0, padx=20, pady=(10, 0))
self.scaling_optionemenu = customtkinter.CTkOptionMenu(self.sidebar_frame, values=["80%", "90%", "100%", "110%", "120%"],
command=self.change_scaling_event)
self.scaling_optionemenu.grid(row=8, column=0, padx=20, pady=(10, 20))
# create main entry and button
self.entry = customtkinter.CTkEntry(self, placeholder_text="CTkEntry")
self.entry.grid(row=3, column=1, columnspan=2, padx=(20, 0), pady=(20, 20), sticky="nsew")
self.main_button_1 = customtkinter.CTkButton(master=self, fg_color="transparent", border_width=2, text_color=("gray10", "#DCE4EE"))
self.main_button_1.grid(row=3, column=3, padx=(20, 20), pady=(20, 20), sticky="nsew")
# create textbox
self.textbox = customtkinter.CTkTextbox(self, width=250)
self.textbox.grid(row=0, column=1, padx=(20, 0), pady=(20, 0), sticky="nsew")
# create tabview
self.tabview = customtkinter.CTkTabview(self, width=250)
self.tabview.grid(row=0, column=2, padx=(20, 0), pady=(20, 0), sticky="nsew")
self.tabview.add("CTkTabview")
self.tabview.add("Tab 2")
self.tabview.add("Tab 3")
self.tabview.tab("CTkTabview").grid_columnconfigure(0, weight=1) # configure grid of individual tabs
self.tabview.tab("Tab 2").grid_columnconfigure(0, weight=1)
self.optionmenu_1 = customtkinter.CTkOptionMenu(self.tabview.tab("CTkTabview"), dynamic_resizing=False,
values=["Value 1", "Value 2", "Value Long Long Long"])
self.optionmenu_1.grid(row=0, column=0, padx=20, pady=(20, 10))
self.combobox_1 = customtkinter.CTkComboBox(self.tabview.tab("CTkTabview"),
values=["Value 1", "Value 2", "Value Long....."])
self.combobox_1.grid(row=1, column=0, padx=20, pady=(10, 10))
self.string_input_button = customtkinter.CTkButton(self.tabview.tab("CTkTabview"), text="Open CTkInputDialog",
command=self.open_input_dialog_event)
self.string_input_button.grid(row=2, column=0, padx=20, pady=(10, 10))
self.label_tab_2 = customtkinter.CTkLabel(self.tabview.tab("Tab 2"), text="CTkLabel on Tab 2")
self.label_tab_2.grid(row=0, column=0, padx=20, pady=20)
# create radiobutton frame
self.radiobutton_frame = customtkinter.CTkFrame(self)
self.radiobutton_frame.grid(row=0, column=3, padx=(20, 20), pady=(20, 0), sticky="nsew")
self.radio_var = tkinter.IntVar(value=0)
self.label_radio_group = customtkinter.CTkLabel(master=self.radiobutton_frame, text="CTkRadioButton Group:")
self.label_radio_group.grid(row=0, column=2, columnspan=1, padx=10, pady=10, sticky="")
self.radio_button_1 = customtkinter.CTkRadioButton(master=self.radiobutton_frame, variable=self.radio_var, value=0)
self.radio_button_1.grid(row=1, column=2, pady=10, padx=20, sticky="n")
self.radio_button_2 = customtkinter.CTkRadioButton(master=self.radiobutton_frame, variable=self.radio_var, value=1)
self.radio_button_2.grid(row=2, column=2, pady=10, padx=20, sticky="n")
self.radio_button_3 = customtkinter.CTkRadioButton(master=self.radiobutton_frame, variable=self.radio_var, value=2)
self.radio_button_3.grid(row=3, column=2, pady=10, padx=20, sticky="n")
# create slider and progressbar frame
self.slider_progressbar_frame = customtkinter.CTkFrame(self, fg_color="transparent")
self.slider_progressbar_frame.grid(row=1, column=1, padx=(20, 0), pady=(20, 0), sticky="nsew")
self.slider_progressbar_frame.grid_columnconfigure(0, weight=1)
self.slider_progressbar_frame.grid_rowconfigure(4, weight=1)
self.seg_button_1 = customtkinter.CTkSegmentedButton(self.slider_progressbar_frame)
self.seg_button_1.grid(row=0, column=0, padx=(20, 10), pady=(10, 10), sticky="ew")
self.progressbar_1 = customtkinter.CTkProgressBar(self.slider_progressbar_frame)
self.progressbar_1.grid(row=1, column=0, padx=(20, 10), pady=(10, 10), sticky="ew")
self.progressbar_2 = customtkinter.CTkProgressBar(self.slider_progressbar_frame)
self.progressbar_2.grid(row=2, column=0, padx=(20, 10), pady=(10, 10), sticky="ew")
self.slider_1 = customtkinter.CTkSlider(self.slider_progressbar_frame, from_=0, to=1, number_of_steps=4)
self.slider_1.grid(row=3, column=0, padx=(20, 10), pady=(10, 10), sticky="ew")
self.slider_2 = customtkinter.CTkSlider(self.slider_progressbar_frame, orientation="vertical")
self.slider_2.grid(row=0, column=1, rowspan=5, padx=(10, 10), pady=(10, 10), sticky="ns")
self.progressbar_3 = customtkinter.CTkProgressBar(self.slider_progressbar_frame, orientation="vertical")
self.progressbar_3.grid(row=0, column=2, rowspan=5, padx=(10, 20), pady=(10, 10), sticky="ns")
# create scrollable frame
self.scrollable_frame = customtkinter.CTkScrollableFrame(self, label_text="CTkScrollableFrame")
self.scrollable_frame.grid(row=1, column=2, padx=(20, 0), pady=(20, 0), sticky="nsew")
self.scrollable_frame.grid_columnconfigure(0, weight=1)
self.scrollable_frame_switches = []
for i in range(100):
switch = customtkinter.CTkSwitch(master=self.scrollable_frame, text=f"CTkSwitch {i}")
switch.grid(row=i, column=0, padx=10, pady=(0, 20))
self.scrollable_frame_switches.append(switch)
# create checkbox and switch frame
self.checkbox_slider_frame = customtkinter.CTkFrame(self)
self.checkbox_slider_frame.grid(row=1, column=3, padx=(20, 20), pady=(20, 0), sticky="nsew")
self.checkbox_1 = customtkinter.CTkCheckBox(master=self.checkbox_slider_frame)
self.checkbox_1.grid(row=1, column=0, pady=(20, 0), padx=20, sticky="n")
self.checkbox_2 = customtkinter.CTkCheckBox(master=self.checkbox_slider_frame)
self.checkbox_2.grid(row=2, column=0, pady=(20, 0), padx=20, sticky="n")
self.checkbox_3 = customtkinter.CTkCheckBox(master=self.checkbox_slider_frame)
self.checkbox_3.grid(row=3, column=0, pady=20, padx=20, sticky="n")
# set default values
self.sidebar_button_3.configure(state="disabled", text="Disabled CTkButton")
self.checkbox_3.configure(state="disabled")
self.checkbox_1.select()
self.scrollable_frame_switches[0].select()
self.scrollable_frame_switches[4].select()
self.radio_button_3.configure(state="disabled")
self.appearance_mode_optionemenu.set("Dark")
self.scaling_optionemenu.set("100%")
self.optionmenu_1.set("CTkOptionmenu")
self.combobox_1.set("CTkComboBox")
self.slider_1.configure(command=self.progressbar_2.set)
self.slider_2.configure(command=self.progressbar_3.set)
self.progressbar_1.configure(mode="indeterminate")
self.progressbar_1.start()
self.textbox.insert("0.0", "CTkTextbox\n\n" + "Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua.\n\n" * 20)
self.seg_button_1.configure(values=["CTkSegmentedButton", "Value 2", "Value 3"])
self.seg_button_1.set("Value 2")
def open_input_dialog_event(self):
dialog = customtkinter.CTkInputDialog(text="Type in a number:", title="CTkInputDialog")
print("CTkInputDialog:", dialog.get_input())
def change_appearance_mode_event(self, new_appearance_mode: str):
customtkinter.set_appearance_mode(new_appearance_mode)
def change_scaling_event(self, new_scaling: str):
new_scaling_float = int(new_scaling.replace("%", "")) / 100
customtkinter.set_widget_scaling(new_scaling_float)
def sidebar_button_event(self):
print("sidebar_button click")
if __name__ == "__main__":
app = App()
app.mainloop()

2
rust-database-master/.gitignore vendored Normal file
View file

@ -0,0 +1,2 @@
/target
Cargo.lock

View file

@ -0,0 +1,7 @@
{
"workbench.colorCustomizations": {
"activityBar.background": "#3E2919",
"titleBar.activeBackground": "#573A23",
"titleBar.activeForeground": "#FCFAF9"
}
}

View file

@ -0,0 +1,12 @@
[package]
name = "rust-database"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
anyhow = { version = "1.0.70", features = ["backtrace"] }
mysql = "23.0.1"
serde = { version = "1.0.160", features = ["derive"] }
serde_json = "1.0.96"

View file

@ -0,0 +1,13 @@
use super::info::Info;
use super::result::Result;
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub struct ConfigID(pub(crate) i64);
#[derive(Debug)]
pub struct Configuration {
pub id: ConfigID,
pub time: i64,
pub infos: Vec<Info>,
pub results: Vec<Result>,
}

View file

@ -0,0 +1,22 @@
use serde::{Deserialize, Serialize};
#[derive(Debug, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord)]
pub struct DataBaseConfig<'a> {
pub user: &'a str,
pub password: &'a str,
pub url: &'a str,
pub port: u16,
pub schema: &'a str,
}
impl<'a> DataBaseConfig<'a> {
pub fn new(user: &'a str, password: &'a str, url: &'a str, port: u16, schema: &'a str) -> Self {
Self {
user,
password,
url,
port,
schema,
}
}
}

View file

@ -0,0 +1,12 @@
use super::configuration::ConfigID;
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub struct InfoID(pub(crate) i64);
#[derive(Debug)]
pub struct Info {
pub id: InfoID,
pub config_id: ConfigID,
pub name: String,
pub value: String,
}

View file

@ -0,0 +1,630 @@
mod configuration;
mod data_base_config;
mod info;
mod measurement;
mod meta_info;
mod result;
mod rights;
mod table_names;
pub use configuration::Configuration;
pub use data_base_config::DataBaseConfig;
pub use measurement::Measurement;
pub use meta_info::MetaInfo;
pub use rights::Rights;
pub use table_names::TableNames;
use anyhow::{anyhow, Result};
use mysql::{params, prelude::Queryable, Pool, PooledConn, Row};
use self::{
configuration::ConfigID,
info::{Info, InfoID},
measurement::{MeasurementID, OutMeasurement},
result::ResultID,
};
use result::Result as DBResult;
pub struct PTSDataBase<'a> {
pool: Pool,
table_names: TableNames<'a>,
_config: DataBaseConfig<'a>,
}
impl<'a> PTSDataBase<'a> {
pub fn new(config: DataBaseConfig<'a>, table_names: TableNames<'a>) -> Result<Self> {
let url = format!(
"mysql://{}:{}@{}:{}/{}",
config.user, config.password, config.url, config.port, config.schema
);
let pool = Pool::new(url.as_str())?;
pool.get_conn()?
.exec_drop(&format!("USE {};", config.schema), ())?;
Ok(Self {
pool,
table_names,
_config: config,
})
}
fn access<F, T>(&self, f: F) -> Result<T>
where
F: FnOnce(PooledConn) -> Result<T>,
{
let conn = self.pool.get_conn()?;
let res = f(conn)?;
Ok(res)
}
}
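// Connection sketch (file names below are placeholders; the tests embed the same JSON inline):
//
//     let db_json = std::fs::read_to_string("db_config.json")?;
//     let tn_json = std::fs::read_to_string("table_names.json")?;
//     let config: DataBaseConfig = serde_json::from_str(&db_json)?; // borrows its &str fields from db_json
//     let table_names: TableNames = serde_json::from_str(&tn_json)?;
//     let mut db = PTSDataBase::new(config, table_names)?;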
// writer
impl<'a> PTSDataBase<'a> {
pub fn write_measurement(
&mut self,
meta_info: &[MetaInfo],
unix_time_stamp: i64,
status: &str,
measurements: &[Measurement],
) -> Result<(ConfigID, ResultID)> {
let config_id = self.get_config(meta_info, unix_time_stamp)?;
let result_id = self.write_result(status, unix_time_stamp, config_id)?;
self.write_measurements(result_id, measurements)?;
Ok((config_id, result_id))
}
pub fn remove_result(&mut self, result_id: ResultID) -> Result<()> {
self.access(|mut conn| {
let result_stmt = conn.prep(&format!(
"DELETE
FROM {}
WHERE id=?;",
self.table_names.results
))?;
let measurement_stmt = conn.prep(&format!(
"DELETE
FROM {}
WHERE result_id=?;",
self.table_names.measurements
))?;
conn.exec_drop(result_stmt, (result_id.0,))?;
conn.exec_drop(measurement_stmt, (result_id.0,))?;
Ok(())
})
}
}
// reader
impl<'a> PTSDataBase<'a> {
pub fn configuration_ids(&mut self) -> Result<Vec<ConfigID>> {
self.access(|mut conn| {
let mut query = conn.query_iter(&format!(
"SELECT *
FROM {};",
self.table_names.configurations
))?;
let mut ids = Vec::new();
while let Some(r) = query.iter() {
for row in r {
ids.push(ConfigID(
row?.get("id")
.ok_or(anyhow!("could not convert config id"))?,
));
}
}
Ok(ids)
})
}
pub fn configuration(&mut self, id: ConfigID) -> Result<Option<Configuration>> {
Ok(self
.access(|mut conn| {
Ok(conn
.query_first(&format!(
"SELECT *
FROM {}
WHERE id=\"{}\";",
self.table_names.configurations, id.0
))?
.map(|row: Row| -> Result<Configuration> {
let configs = Configuration {
id: ConfigID(
row.get("id")
.ok_or(anyhow!("could not convert config id"))?,
),
time: row
.get("time")
.ok_or(anyhow!("could not convert unix time"))?,
infos: Vec::new(),
results: Vec::new(),
};
Ok(configs)
})
.transpose()?)
})?
.map(|mut config| -> Result<Configuration> {
self.infos(&mut config)?;
self.results(&mut config)?;
Ok(config)
})
.transpose()?)
}
pub fn configurations(&mut self) -> Result<Vec<Configuration>> {
let mut configs = self.access(|mut conn| {
let mut query = conn.query_iter(&format!(
"SELECT *
FROM {};",
self.table_names.configurations
))?;
let mut configs = Vec::new();
while let Some(r) = query.iter() {
for row in r {
let row = row?;
configs.push(Configuration {
id: ConfigID(
row.get("id")
.ok_or(anyhow!("could not convert config id"))?,
),
time: row
.get("time")
.ok_or(anyhow!("could not convert unix time"))?,
infos: Vec::new(),
results: Vec::new(),
});
}
}
Ok(configs)
})?;
for config in configs.iter_mut() {
self.infos(config)?;
self.results(config)?;
}
Ok(configs)
}
pub fn infos(&mut self, configuration: &mut Configuration) -> Result<()> {
self.access(|mut conn| {
let mut query = conn.query_iter(&format!(
"SELECT *
FROM {}
WHERE config_id=\"{}\";",
self.table_names.infos, configuration.id.0,
))?;
while let Some(r) = query.iter() {
for row in r {
let row = row?;
configuration.infos.push(Info {
id: InfoID(row.get("id").ok_or(anyhow!("could not convert info id"))?),
config_id: ConfigID(
row.get("config_id")
.ok_or(anyhow!("could not convert config_id"))?,
),
name: row.get("name").ok_or(anyhow!("could not convert name"))?,
value: row.get("value").ok_or(anyhow!("could not convert value"))?,
});
}
}
Ok(())
})
}
pub fn results(&mut self, configuration: &mut Configuration) -> Result<()> {
self.access(|mut conn| {
let mut query = conn.query_iter(&format!(
"SELECT *
FROM {}
WHERE config_id=\"{}\";",
self.table_names.results, configuration.id.0,
))?;
while let Some(r) = query.iter() {
for row in r {
let row = row?;
configuration.results.push(DBResult {
id: ResultID(
row.get("id")
.ok_or(anyhow!("could not convert result id"))?,
),
config_id: ConfigID(
row.get("config_id")
.ok_or(anyhow!("could not convert config_id"))?,
),
time: row
.get("time")
.ok_or(anyhow!("could not convert unix time"))?,
status: row
.get("status")
.ok_or(anyhow!("could not convert status"))?,
measurements: Vec::new(),
});
}
}
Ok(())
})?;
for result in configuration.results.iter_mut() {
self.measurements(result)?;
}
Ok(())
}
pub fn measurements(&mut self, result: &mut DBResult) -> Result<()> {
self.access(|mut conn| {
let mut query = conn.query_iter(&format!(
"SELECT *
FROM {}
WHERE result_id=\"{}\";",
self.table_names.measurements, result.id.0,
))?;
while let Some(r) = query.iter() {
for row in r {
let row = row?;
result.measurements.push(OutMeasurement {
id: MeasurementID(
row.get("id")
.ok_or(anyhow!("could not convert measurement id"))?,
),
result_id: ResultID(
row.get("result_id")
.ok_or(anyhow!("could not convert result id"))?,
),
name: row.get("name").ok_or(anyhow!("could not convert name"))?,
value: row.get("value").ok_or(anyhow!("could not convert value"))?,
rights: {
let tmp: Option<String> = row
.get("rights")
.ok_or(anyhow!("could not convert rights"))?;
tmp.map(|rights| Rights::try_from(rights)).transpose()?
},
});
}
}
Ok(())
})
}
pub fn search_infos(&mut self, meta_info: &[MetaInfo]) -> Result<Option<ConfigID>> {
if meta_info.is_empty() {
return Ok(None);
}
let mut query = format!(
"SELECT m.config_id \n FROM {} AS m ",
self.table_names.infos
);
if meta_info.len() > 1 {
for (index, _) in meta_info.iter().enumerate() {
let letter = format!("n{}", index);
query += &format!(
"LEFT JOIN {} AS {} \n ON m.config_id={}.config_id ",
self.table_names.infos, letter, letter
);
}
}
query += &format!(
"WHERE m.name=\'{}\' AND m.value=\'{}\' ",
meta_info[0].name, meta_info[0].value
);
if meta_info.len() > 1 {
for (index, meta) in meta_info.iter().enumerate() {
let letter = format!("n{}", index);
query += &format!(
"AND {letter}.name=\'{}\' AND {letter}.value=\'{}\' ",
meta.name, meta.value
);
}
}
query += ";";
self.access(|mut conn| Ok(conn.query_first(query)?.map(|i| ConfigID(i))))
}
pub fn check_for_infos(&mut self, meta_info: &[MetaInfo]) -> Result<bool> {
Ok(self.search_infos(meta_info)?.is_some())
}
}
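// For meta_info = [("Test", "1.0"), ("Run", "2")], search_infos() above builds roughly:
//
//     SELECT m.config_id FROM <infos table> AS m
//     LEFT JOIN <infos table> AS n0 ON m.config_id=n0.config_id
//     LEFT JOIN <infos table> AS n1 ON m.config_id=n1.config_id
//     WHERE m.name='Test' AND m.value='1.0'
//     AND n0.name='Test' AND n0.value='1.0'
//     AND n1.name='Run' AND n1.value='2';
//
// i.e. one self-join per meta info entry; the n0 conditions repeat the base (m) conditions.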
// helper
impl<'a> PTSDataBase<'a> {
fn write_result(
&mut self,
status: &str,
unix_time_stamp: i64,
config_id: ConfigID,
) -> Result<ResultID> {
self.access(|mut conn| {
conn.exec_drop(
&format!(
"INSERT INTO `{}` (status, time, config_id)
VALUES(:status, :time_stamp, :config_id);",
self.table_names.results
),
params! {
"status" => status,
"time_stamp" => unix_time_stamp,
"config_id" => config_id.0,
},
)?;
Ok(ResultID(conn.last_insert_id() as i64))
})
}
fn write_measurements(
&mut self,
result_id: ResultID,
measurements: &[Measurement],
) -> Result<()> {
self.access(|mut conn| {
conn.exec_batch(
&format!(
"INSERT INTO `{}` (result_id, name, value, rights)
VALUES(:result_id, :name, :value, :rights);",
self.table_names.measurements
),
measurements.iter().map(|measurement| {
params! {
"result_id" => result_id.0,
"name" => measurement.name,
"value" => measurement.value,
"rights" => measurement.rights,
}
}),
)?;
Ok(())
})
}
fn new_config(&mut self, unix_time_stamp: i64) -> Result<ConfigID> {
self.access(|mut conn| {
let stmt = conn.prep(&format!(
"INSERT INTO `{}` (time)
VALUES(?);",
self.table_names.configurations
))?;
conn.exec_drop(stmt, (unix_time_stamp,))?;
Ok(ConfigID(conn.last_insert_id() as i64))
})
}
fn insert_info(&mut self, meta_info: &[MetaInfo], unix_time_stamp: i64) -> Result<ConfigID> {
let id = self.new_config(unix_time_stamp)?;
self.access(|mut conn| {
conn.exec_batch(
&format!(
"INSERT INTO `{}` (name, value, config_id)
VALUES(:name, :value, :config_id);",
self.table_names.infos
),
meta_info.iter().map(|meta| {
params! {
"name" => &meta.name,
"value" => &meta.value,
"config_id" => id.0
}
}),
)?;
Ok(())
})?;
Ok(id)
}
fn get_config(&mut self, meta_info: &[MetaInfo], unix_time_stamp: i64) -> Result<ConfigID> {
match self.search_infos(meta_info)? {
Some(id) => Ok(id),
None => self.insert_info(meta_info, unix_time_stamp),
}
}
}
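// Write/read round trip as exercised by the tests below (values are the test fixtures):
//
//     let (config_id, result_id) = db.write_measurement(
//         &[("Test", "1.0").into()],
//         0,    // unix time stamp
//         "Ok",
//         &[Measurement { name: "TestKey", value: 2.0, rights: Some(Rights::PremiumUser) }],
//     )?;
//     let config = db.configuration(config_id)?; // Option<Configuration> incl. infos, results, measurements
//     db.remove_result(result_id)?;              // deletes the result row and its measurements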
#[cfg(test)]
mod test {
use std::net::Ipv4Addr;
use crate::rights::Rights;
use super::*;
use anyhow::Result;
use serde_json::{from_str, to_string_pretty};
const T: &str = "
{
\"configurations\": \"api_configurations\",
\"infos\": \"api_info\",
\"measurements\": \"api_measurements\",
\"results\": \"api_results\"
}";
const C: &str = "
{
\"user\": \"device\",
\"password\": \"password\",
\"url\": \"127.0.0.1\",
\"port\": 3306,
\"schema\": \"ptsdata\"
}";
#[test]
fn connect_to_pts_db() -> Result<()> {
let db_info: &str = "
{
\"user\": \"device\",
\"password\": \"password\",
\"url\": \"127.0.0.1\",
\"port\": 3306,
\"schema\": \"ptsdata\"
}";
let table_names = from_str(T)?;
let config = from_str(db_info)?;
PTSDataBase::new(config, table_names)?;
Ok(())
}
#[test]
fn db_config_read_write() -> Result<()> {
const CONF_FILE: &str = "db_test_config.json";
let lh = Ipv4Addr::LOCALHOST.to_string();
let conf = DataBaseConfig::new(
"test_user",
"test_password",
lh.as_str(),
62345,
"test_schema",
);
std::fs::write(CONF_FILE, to_string_pretty(&conf)?)?;
let s = std::fs::read_to_string(CONF_FILE)?;
let c = from_str(&s)?;
assert_eq!(conf, c);
Ok(())
}
#[test]
fn table_names_read_write() -> Result<()> {
const TB_NAME_FILE: &str = "table_names_test.json";
let table_names = TableNames {
configurations: "configs",
infos: "infos",
measurements: "measurements",
results: "results",
};
std::fs::write(TB_NAME_FILE, to_string_pretty(&table_names)?)?;
let s = std::fs::read_to_string(TB_NAME_FILE)?;
let c = from_str(&s)?;
assert_eq!(table_names, c);
Ok(())
}
#[test]
fn establish_db_connection() -> Result<()> {
let table_names = from_str(T)?;
let config = from_str(C)?;
assert!(PTSDataBase::new(config, table_names).is_ok());
Ok(())
}
#[test]
fn test_read() -> Result<()> {
let table_names = from_str(T)?;
let config = from_str(C)?;
let mut db = PTSDataBase::new(config, table_names)?;
let ids = db.configuration_ids()?;
if let Some(id) = ids.first() {
assert!(db.configuration(*id).is_ok());
}
Ok(())
}
#[test]
fn test_write() -> Result<()> {
let table_names = from_str(T)?;
let config = from_str(C)?;
let mut db = PTSDataBase::new(config, table_names)?;
let meta_info = [("Test", "1.0").into()];
let measurements = [Measurement {
name: "TestKey",
value: 2.0,
rights: Some(Rights::PremiumUser),
}];
let (config_id, result_id) = db.write_measurement(&meta_info, 0, "Ok", &measurements)?;
// check values written to DB
{
let config = db
.configuration(config_id)?
.expect("config id should be available!");
let result = config
.results
.iter()
.find(|result| result.id == result_id)
.expect("result is expected to be available");
assert_eq!(result.measurements.len(), 1);
assert_eq!(result.measurements[0].name, measurements[0].name);
assert_eq!(result.measurements[0].value, measurements[0].value);
assert_eq!(result.measurements[0].rights, measurements[0].rights);
}
db.remove_result(result_id)?;
// check that values are successfully cleared afterwards
{
let config = db
.configuration(config_id)?
.expect("config id should be available");
assert!(config
.results
.iter()
.find(|result| result.id == result_id)
.is_none());
}
Ok(())
}
}

View file

@ -0,0 +1,21 @@
use crate::rights::Rights;
use super::result::ResultID;
pub struct Measurement<'a> {
pub name: &'a str,
pub value: f32,
pub rights: Option<Rights>,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub struct MeasurementID(pub(crate) i64);
#[derive(Debug)]
pub struct OutMeasurement {
pub id: MeasurementID,
pub name: String,
pub value: f32,
pub result_id: ResultID,
pub rights: Option<Rights>,
}

View file

@ -0,0 +1,41 @@
#[derive(Debug)]
pub struct MetaInfo {
pub name: String,
pub value: String,
}
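// The From impls below allow writing e.g. `("Test", "1.0").into()` for any &str/&String pairing
// when assembling the meta info slice passed to PTSDataBase::write_measurement.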
impl<'a> From<(&'a str, &'a str)> for MetaInfo {
fn from((name, value): (&'a str, &'a str)) -> Self {
Self {
name: name.to_string(),
value: value.to_string(),
}
}
}
impl<'a> From<(&'a String, &'a str)> for MetaInfo {
fn from((name, value): (&'a String, &'a str)) -> Self {
Self {
name: name.to_string(),
value: value.to_string(),
}
}
}
impl<'a> From<(&'a str, &'a String)> for MetaInfo {
fn from((name, value): (&'a str, &'a String)) -> Self {
Self {
name: name.to_string(),
value: value.to_string(),
}
}
}
impl<'a> From<(&'a String, &'a String)> for MetaInfo {
fn from((name, value): (&'a String, &'a String)) -> Self {
Self {
name: name.to_string(),
value: value.to_string(),
}
}
}

View file

@ -0,0 +1,13 @@
use super::{configuration::ConfigID, measurement::OutMeasurement};
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub struct ResultID(pub(crate) i64);
#[derive(Debug)]
pub struct Result {
pub id: ResultID,
pub time: i64,
pub config_id: ConfigID,
pub status: String,
pub measurements: Vec<OutMeasurement>,
}

View file

@ -0,0 +1,35 @@
use anyhow::Result;
use serde::{Deserialize, Serialize};
use serde_json::from_str;
#[derive(Debug, Serialize, Deserialize, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub enum Rights {
#[serde(rename = "Super-User")]
SuperUser,
#[serde(rename = "PTS-User")]
PTSUser,
#[serde(rename = "Premium-User")]
PremiumUser,
#[serde(rename = "Standard-User")]
StandardUser,
#[serde(rename = "Test-User")]
TestUser,
}
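// With the serde renames above, a Rights value is stored in the `rights` column as a JSON string
// literal (e.g. "\"Premium-User\"") via Into<mysql::Value> and parsed back through TryFrom<String>.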
impl TryFrom<String> for Rights {
type Error = anyhow::Error;
fn try_from(value: String) -> Result<Self> {
Ok(from_str(&value)?)
}
}
impl Into<mysql::Value> for Rights {
fn into(self) -> mysql::Value {
serde_json::to_string(&self).unwrap().into()
}
}

View file

@ -0,0 +1,9 @@
use serde::{Deserialize, Serialize};
#[derive(Debug, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord)]
pub struct TableNames<'a> {
pub configurations: &'a str,
pub infos: &'a str,
pub measurements: &'a str,
pub results: &'a str,
}