# Published January 8, 2026 | Version v1 — Event: Open
"""
STaRS-ESE Unified Interface — 2026
Auteur : Kevin Fradier — Chercheur indépendant 🇫🇷
Licence : CC BY‑NC‑ND 4.0
Objectif : Pipeline complet pour diagnostics locaux et réseaux multi-bassins, séquences, champs spatiaux.
"""

import hashlib
import zlib
from collections import Counter

import numpy as np
from hurst import compute_Hc
from scipy.ndimage import gaussian_filter

# -----------------------------
# SIGNATURE & RNG
# -----------------------------
# Author signature folded into every result hash, and a deterministic RNG
# seed derived from it so repeated runs reproduce the same stochastic output.
AUTHOR_SIGNATURE = "Kevin Fradier | STaRS-ESE | Unified 2026"
SEED = int(hashlib.sha256(AUTHOR_SIGNATURE.encode()).hexdigest(),16) % (2**32)  # fold digest into 32-bit seed range
rng = np.random.default_rng(SEED)  # module-wide RNG shared by all simulators below

def compute_hash(result_dict):
    """Return a SHA-256 hex digest binding *result_dict* to the author signature.

    The dict is hashed via its str() form, so key insertion order matters.
    """
    digest = hashlib.sha256()
    for chunk in (str(result_dict), AUTHOR_SIGNATURE):
        digest.update(chunk.encode())
    return digest.hexdigest()

# -----------------------------
# STaRS LOCAL
# -----------------------------
def simulate_taphonomic_noise(counts, loss_rate=0.3):
    """Randomly zero out entries of each level to mimic taphonomic loss.

    Each entry independently survives with probability 1 - loss_rate;
    multiplying by the boolean survival mask zeroes the lost entries.
    """
    return [level * (rng.random(len(level)) > loss_rate) for level in counts]

def strat_entropy(level):
    """Shannon entropy (bits) of the taxon-count distribution in one level.

    An all-zero level carries no information and yields 0.0.
    """
    total = np.sum(level)
    if total == 0:
        return 0.0
    probs = level / total
    probs = probs[probs > 0]  # drop zero bins: 0*log(0) is defined as 0
    return -np.sum(probs * np.log2(probs))

def robustness_test(strat_data, n_iter=100, loss_rate=0.3):
    """Correlate per-level totals before and after simulated taphonomic loss.

    Returns (mean, std) of the Pearson correlation across n_iter noisy
    replicates; a replicate with zero variance on either side scores 0.
    """
    baseline = np.array([np.sum(level) for level in strat_data])
    scores = []
    for _ in range(n_iter):
        degraded = np.array(
            [np.sum(level) for level in simulate_taphonomic_noise(strat_data, loss_rate)]
        )
        if np.std(baseline) == 0 or np.std(degraded) == 0:
            scores.append(0)  # correlation undefined on constant vectors
        else:
            scores.append(np.corrcoef(baseline, degraded)[0, 1])
    return np.nanmean(scores), np.nanstd(scores)

def STaRS_local(strat_data):
    """Local STaRS diagnostics for one basin.

    Parameters
    ----------
    strat_data : sequence of np.array
        Per-level taxon-count arrays of one stratigraphic column.

    Returns
    -------
    dict with mean_entropy, robustness_corr, robustness_std (all plain
    floats) and a provenance "hash" over those values.
    """
    entropies = [strat_entropy(level) for level in strat_data]
    mean_corr, std_corr = robustness_test(strat_data)
    result = {
        "mean_entropy": float(np.mean(entropies)),
        # Cast to plain floats for consistency with mean_entropy and to keep
        # the dict serializable (robustness_test returns numpy scalars).
        "robustness_corr": float(mean_corr),
        "robustness_std": float(std_corr),
    }
    result["hash"] = compute_hash(result)
    return result

# -----------------------------
# STaRS-NET INTER-BASSINS
# -----------------------------
def build_inter_basin_network(basin_metrics):
    """Pairwise similarity matrix between basins.

    Entry (i, j) is 1 - |robustness_i - robustness_j|, so identical
    robustness scores give 1 and the diagonal is always 1.
    """
    vals = np.array([metrics["robustness_corr"] for metrics in basin_metrics])
    # Broadcasting replaces the original double loop over all pairs.
    return 1 - np.abs(vals[:, None] - vals[None, :])

def NRSS(corr_matrix, threshold=0.5):
    """Network robustness score: fraction of ordered basin pairs (self-pairs
    included) whose similarity meets *threshold*."""
    size = corr_matrix.shape[0]
    strong_links = np.sum(corr_matrix >= threshold)
    return strong_links / (size * size)

def STaRS_NET(basin_metrics):
    """Inter-basin network diagnostics from a list of per-basin metric dicts."""
    similarity = build_inter_basin_network(basin_metrics)
    result = {
        "network_score": NRSS(similarity),
        "corr_matrix": similarity.tolist(),
    }
    result["hash"] = compute_hash(result)
    return result

# -----------------------------
# ESE LOCAL
# -----------------------------
def shannon_entropy(seq):
    """Shannon entropy (bits) of the symbol distribution in *seq*.

    Returns 0.0 for an empty sequence. Uses a single-pass Counter instead
    of seq.count() per unique symbol, which was O(n * unique).
    """
    total = len(seq)
    if total == 0:
        return 0.0
    counts = Counter(seq)
    return -sum((c / total) * np.log2(c / total) for c in counts.values())

def compression_ratio(seq):
    """zlib-compressed size of *seq* divided by its raw UTF-8 size.

    Lower values mean more redundancy. The empty string, previously a
    ZeroDivisionError, is defined as 1.0 (neutral: nothing to compress).
    """
    raw = bytes(seq, 'utf-8')
    if not raw:
        return 1.0
    return len(zlib.compress(raw)) / len(raw)

def hurst_exponent(seq):
    """Hurst exponent of the character-code series derived from *seq*."""
    codes = np.array(list(map(ord, seq)))
    exponent, _, _ = compute_Hc(codes, kind='change')
    return exponent

def spatial_field_analysis(field, sigma_corr=3):
    """Gaussian-smooth a 2D field and return it with its centered power spectrum.

    Returns (smoothed_field, power) where power is |FFT2|^2 after fftshift,
    so the zero-frequency component sits at the array center.
    """
    smoothed = gaussian_filter(field, sigma=sigma_corr)
    spectrum = np.fft.fftshift(np.fft.fft2(smoothed))
    return smoothed, np.abs(spectrum) ** 2

def radial_power_spectrum(power):
    """Azimuthally averaged spectrum of a *centered* (fftshift-ed) 2D power array.

    Parameters
    ----------
    power : np.ndarray (2D)
        Power spectrum with the zero frequency at the center, as produced
        by spatial_field_analysis.

    Returns
    -------
    (k_mid, radial_power) : bin-center wavenumbers and mean power per radial
    bin (50 bins over [0, k_max]); bins with no samples yield NaN.

    Fixes vs. the original: the frequency grid is fftshift-ed to match the
    centered layout of `power` (the unshifted grid paired low-k power with
    high-k bins), rectangular inputs are supported (both axes were built
    from shape[0]), and the maximum-k sample is no longer dropped.
    """
    ny, nx = power.shape
    # Shifted per-axis frequencies so grid positions line up with `power`.
    ky = np.fft.fftshift(np.fft.fftfreq(ny))
    kx = np.fft.fftshift(np.fft.fftfreq(nx))
    k = np.sqrt(kx[None, :] ** 2 + ky[:, None] ** 2)
    k_flat, p_flat = k.ravel(), power.ravel()
    bins = np.linspace(0.0, k_flat.max(), 50)
    radial_power = np.empty(len(bins) - 1)
    for i in range(len(bins) - 1):
        lo, hi = bins[i], bins[i + 1]
        if i == len(bins) - 2:
            in_bin = (k_flat >= lo) & (k_flat <= hi)  # include k_max in last bin
        else:
            in_bin = (k_flat >= lo) & (k_flat < hi)
        # Explicit NaN for empty bins instead of a mean-of-empty warning.
        radial_power[i] = p_flat[in_bin].mean() if in_bin.any() else np.nan
    k_mid = 0.5 * (bins[1:] + bins[:-1])
    return k_mid, radial_power

def perturbation_stability(field, n_iter=10, noise_level=0.05):
    """Mean and std of the field average under repeated additive Gaussian noise.

    Each replicate adds noise_level-scaled standard-normal noise to the
    whole field and records the resulting global mean.
    """
    replicate_means = [
        np.mean(field + noise_level * rng.standard_normal(field.shape))
        for _ in range(n_iter)
    ]
    return np.mean(replicate_means), np.std(replicate_means)

def ESE_local(data, data_type='sequence'):
    """Local ESE metrics for one dataset.

    data_type='sequence': Shannon entropy, compression ratio, Hurst exponent
    of a symbolic string.
    data_type='spatial' : perturbation stability plus radial power spectrum
    of a 2D field. Any other data_type yields only the provenance hash.
    """
    result = {}
    if data_type == 'sequence':
        result["entropy"] = shannon_entropy(data)
        result["compression"] = compression_ratio(data)
        result["hurst"] = hurst_exponent(data)
    elif data_type == 'spatial':
        _, power = spatial_field_analysis(data)
        k_mid, radial = radial_power_spectrum(power)
        mean_val, std_val = perturbation_stability(data)
        result["mean_field"] = mean_val
        result["std_field"] = std_val
        result["radial_k"] = k_mid.tolist()
        result["radial_power"] = radial.tolist()
    result["hash"] = compute_hash(result)
    return result

# -----------------------------
# ESE-NET — STRUCTURE RÉSEAU GLOBAL
# -----------------------------
def ESE_NET(networks):
    """Aggregate local ESE results into global network scores.

    Parameters
    ----------
    networks : list
        Typically the dicts produced by ESE_local. Non-dict entries score 0.

    Returns a dict with network_entropy, network_robustness and a "hash".

    Fix vs. the original: np.mean(list(n.values())) crashed on ESE_local
    output because values include the "hash" string and radial lists; only
    numeric scalar values are averaged now (all-numeric dicts are unchanged).
    """
    if len(networks) == 0:
        return {"network_entropy": 0, "network_robustness": 0, "hash": compute_hash({})}

    def _numeric_mean(item):
        # Non-dict entries contribute 0, matching the original fallback.
        if not isinstance(item, dict):
            return 0.0
        vals = [v for v in item.values() if isinstance(v, (int, float))]
        return float(np.mean(vals)) if vals else 0.0

    per_network = [_numeric_mean(net) for net in networks]
    result = {
        "network_entropy": float(np.mean(per_network)),
        # Same aggregate as entropy by construction (kept for interface
        # compatibility — the original computed the identical value twice).
        "network_robustness": float(np.mean(per_network)),
    }
    result["hash"] = compute_hash(result)
    return result

# -----------------------------
# PIPELINE UNIFIÉ
# -----------------------------
def STaRS_ESE_pipeline(strat_datasets, seq_datasets=None, spatial_datasets=None):
    """
    Run the full unified STaRS + ESE pipeline.

    strat_datasets   : list of fossil columns (each a list/array of levels)
    seq_datasets     : optional list of symbolic sequences (str)
    spatial_datasets : optional list of 2D fields (np.array)
    Returns a dict with every local/network score plus a global hash.
    """
    results = {}

    # --- Local STaRS diagnostics, one entry per basin
    local_stars = [STaRS_local(column) for column in strat_datasets]
    results["STaRS_local"] = local_stars

    # --- Inter-basin network score built from the local results
    results["STaRS_NET"] = STaRS_NET(local_stars)

    # --- Local ESE metrics: sequences first, then spatial fields
    local_ese = []
    for sequence in (seq_datasets or []):
        local_ese.append(ESE_local(sequence, data_type='sequence'))
    for field in (spatial_datasets or []):
        local_ese.append(ESE_local(field, data_type='spatial'))
    results["ESE_local"] = local_ese

    # --- Global ESE network aggregation
    results["ESE_NET"] = ESE_NET(local_ese)

    # --- Provenance hash over everything above
    results["global_hash"] = compute_hash(results)

    return results

# -----------------------------
# EXEMPLE D'UTILISATION
# -----------------------------
if __name__ == "__main__":
    from pprint import pprint

    # Simulated demo data: three fossil columns, two symbolic sequences,
    # and two random 16x16 spatial fields.
    strat_data = [np.array([5, 3, 0, 2]), np.array([2, 1, 0, 0]), np.array([0, 0, 1, 0])]
    seq_data = ["AGCTTAGGCTAAGCTTAGGCTA" * 5, "CGTAGCTAGCTAGCTA" * 7]
    spatial_data = [rng.standard_normal((16, 16)), rng.standard_normal((16, 16))]

    # Full pipeline run, printed in readable form.
    pprint(STaRS_ESE_pipeline(strat_data, seq_data, spatial_data))

# ✅ Ce que cette version unique fait :
#
#   1. STaRS local : robustesse et entropie par bassin.
#   2. STaRS-NET : corrélation inter-bassins et score NRSS.
#   3. ESE local : séquences et champs spatiaux, métriques universelles.
#   4. ESE-NET : robustesse globale sur tous les modules.
#   5. Hash global : tout pipeline traçable.
#   6. Multi-domaines : fossiles, réseaux, séquences, images 2D.
#   7. Bottom-up et falsifiable : chaque résultat testable contre bruit simulé.
#   8. Interface unique : tu entres tes données → tu obtiens tout, prêt à publier.
# Licence : CC BY‑NC‑ND 4.0

# --- Repository attachment metadata (export residue, not code) ---
# Files (1.7 MB total): grok_1767398604687.jpg
#   md5:f03357853a3d9168b050bcc35c7b6aea — 33.4 kB
#   md5:3d10b09d31bf5ce99d9dbffb98985f97 — 88.2 kB
#   md5:66fdfc6c8d8eb71472e6f710d193fac6 — 1.6 MB