<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>NeuroFractal Music Box</title>
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<style>
html, body {
margin: 0; padding: 0;
background: black;
overflow: hidden;
font-family: monospace;
}
#hydra-canvas {
position: absolute;
top: 0; left: 0;
width: 100vw;
height: 100vh;
image-rendering: pixelated;
z-index: 0;
}
#terminal {
position: absolute;
bottom: 0;
width: 100%;
height: 200px;
background: rgba(0,0,0,0.75);
color: white;
border: none;
padding: 10px;
resize: none;
font-size: 14px;
line-height: 1.4;
z-index: 2;
}
#instructions {
position: absolute;
bottom: 10px;
left: 10px;
width: 250px;
max-height: 200px;
background: rgba(0,0,0,0.75);
color: white;
padding: 10px;
font-size: 12px;
line-height: 1.4;
z-index: 2;
overflow-y: auto;
display: none;
}
#btns {
position: absolute;
top: 10px;
left: 10px;
z-index: 3;
}
button {
background: #222;
color: white;
border: 1px solid #555;
padding: 6px 10px;
margin-right: 5px;
margin-bottom: 5px;
font-size: 12px;
cursor: pointer;
transition: background 0.2s, transform 0.2s;
}
button.active {
background: #155;
border-color: #2aa;
}
button.pulsing {
animation: pulse 1s infinite alternate;
}
@keyframes pulse {
0% { transform: scale(1); }
100% { transform: scale(1.1); }
}
</style>
</head>
<body>
<canvas id="hydra-canvas"></canvas>
<div id="btns">
<button id="btn-terminal" onclick="terminal.style.display = terminal.style.display === 'none' ? 'block' : 'none'">🧠 Открыть терминал</button>
<button id="btn-code" onclick="eval(terminal.value)">▶️ Запуск кода</button>
<button id="btn-dream" onclick="insertDream()">💤 Нейросон</button>
<button id="btn-osc1" onclick="insertOscPreset1()">🌊 OSC Пресет 1</button>
<button id="btn-osc2" onclick="insertOscPreset2()">🔥 OSC Пресет 2</button>
<button id="btn-osc3" onclick="insertOscPreset3()">⚡️ OSC Пресет 3</button>
<button id="btn-audio" onclick="toggleAudio()">🔊 Вкл/Выкл звук</button>
<button id="btn-voice" onclick="toggleVoiceDetection()">🎤 Вкл/Выкл голос</button>
<button id="btn-formant" onclick="toggleFormantSynthesis()">🗣️ Вкл/Выкл речь</button>
<button id="btn-speech" onclick="toggleSpeechSynthesis()">🗣️ Вкл/Выкл синтез речи</button>
<button id="btn-randomize" onclick="randomizeParameters()">🎲 Рандомизировать</button>
<button id="btn-instructions" onclick="instructions.style.display = instructions.style.display === 'none' ? 'block' : 'none'">📜 Инструкции</button>
</div>
<textarea id="terminal" spellcheck="false" style="display:none"></textarea>
<div id="instructions" style="display:none">
<strong>Instructions:</strong><br>
1. Use <strong>headphones</strong> for the binaural beats.<br>
2. Allow access to the <strong>microphone</strong> and <strong>camera</strong>.<br>
3. <strong>Speak/sing</strong> (800-3000 Hz) to activate the voice and speech features.<br>
4. <strong>Move in front of the camera</strong> for a visual response.<br>
5. Buttons:<br>
- 🧠: Show/hide the code terminal.<br>
- ▶️: Run the code from the terminal.<br>
- 💤: Main visualization (globe, EEG, radar).<br>
- 🌊/🔥/⚡️: OSC presets (different rhythms).<br>
- 🔊: Toggle sound (binaural beats, nature sounds).<br>
- 🎤: Toggle voice control.<br>
- 🗣️ (formant): Toggle formant synthesis.<br>
- 🗣️ (synthesis): Toggle random spoken words.<br>
- 🎲: Randomize parameters.<br>
- 📜: Show/hide these instructions.<br>
6. The <strong>radar</strong> (top right) shows the surroundings.<br>
7. Buttons <strong>pulse</strong> when voice/camera activity is detected.<br>
8. The creature speaks random words and pauses to "think" during activity.<br>
Problems? Check the console (F12) and your permissions.
</div>
<script src="https://unpkg.com/hydra-synth"></script>
<script>
const canvas = document.getElementById("hydra-canvas");
canvas.width = window.innerWidth * window.devicePixelRatio;
canvas.height = window.innerHeight * window.devicePixelRatio;
const hydra = new Hydra({
detectAudio: true,
canvas: canvas,
width: canvas.width,
height: canvas.height,
pixelRatio: window.devicePixelRatio,
numOutputs: 11 // the dream patch renders to o0..o10; Hydra creates only 4 output buffers by default
});
// Web Audio API: Binaural beats, formant synthesis, nature sounds, voice detection
const audioCtx = new (window.AudioContext || window.webkitAudioContext)();
let oscillators = [];
let gainNodes = [];
let formantFilters = []; // bandpass filters for the two formants (retuned in randomizeParameters)
let birdInterval = null; // bird-chirp scheduler id (cleared in toggleAudio)
let analyser = null;
let voiceActive = false;
let formantSynthesisActive = false;
let speechSynthesisActive = false;
let audioPlaying = false;
let formantIntensity = 0;
let camIntensity = 0;
let isPausedForThought = false;
// Randomizer state
let randomParams = {
deltaFreq: 200,
thetaFreq: 220,
alphaFreq: 240,
betaFreq: 260,
formant1Freq: 700,
formant2Freq: 1200,
oscFreqGlobe: 5,
oscFreqLasers: 8,
oscFreqEEG1: 4,
oscFreqEEG2: 1,
kaleidGlobe: 2,
kaleidLasers: 2,
kaleidPreset1: 2,
kaleidPreset2: 2,
kaleidPreset3: 2,
colorR: 0.2,
colorG: 0.5,
colorB: 1
};
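// Binaural beats: each band uses two sine oscillators hard-panned left and
// right whose frequencies differ by the target brainwave rate, e.g. 200 Hz
// (left) and 202 Hz (right) produce a perceived 2 Hz delta-band beat; theta,
// alpha and beta use 6, 10 and 20 Hz offsets respectively.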
function createBinauralAndSounds() {
// Binaural Beats
const deltaLeft = audioCtx.createOscillator();
deltaLeft.type = 'sine';
deltaLeft.frequency.setValueAtTime(randomParams.deltaFreq, audioCtx.currentTime);
const deltaLeftGain = audioCtx.createGain();
deltaLeftGain.gain.setValueAtTime(0.04, audioCtx.currentTime);
const deltaLeftPan = audioCtx.createStereoPanner();
deltaLeftPan.pan.setValueAtTime(-1, audioCtx.currentTime);
deltaLeft.connect(deltaLeftGain).connect(deltaLeftPan).connect(audioCtx.destination);
const deltaRight = audioCtx.createOscillator();
deltaRight.type = 'sine';
deltaRight.frequency.setValueAtTime(randomParams.deltaFreq + 2, audioCtx.currentTime);
const deltaRightGain = audioCtx.createGain();
deltaRightGain.gain.setValueAtTime(0.04, audioCtx.currentTime);
const deltaRightPan = audioCtx.createStereoPanner();
deltaRightPan.pan.setValueAtTime(1, audioCtx.currentTime);
deltaRight.connect(deltaRightGain).connect(deltaRightPan).connect(audioCtx.destination);
const thetaLeft = audioCtx.createOscillator();
thetaLeft.type = 'sine';
thetaLeft.frequency.setValueAtTime(randomParams.thetaFreq, audioCtx.currentTime);
const thetaLeftGain = audioCtx.createGain();
thetaLeftGain.gain.setValueAtTime(0.03, audioCtx.currentTime);
const thetaLeftPan = audioCtx.createStereoPanner();
thetaLeftPan.pan.setValueAtTime(-1, audioCtx.currentTime);
thetaLeft.connect(thetaLeftGain).connect(thetaLeftPan).connect(audioCtx.destination);
const thetaRight = audioCtx.createOscillator();
thetaRight.type = 'sine';
thetaRight.frequency.setValueAtTime(randomParams.thetaFreq + 6, audioCtx.currentTime);
const thetaRightGain = audioCtx.createGain();
thetaRightGain.gain.setValueAtTime(0.03, audioCtx.currentTime);
const thetaRightPan = audioCtx.createStereoPanner();
thetaRightPan.pan.setValueAtTime(1, audioCtx.currentTime);
thetaRight.connect(thetaRightGain).connect(thetaRightPan).connect(audioCtx.destination);
const alphaLeft = audioCtx.createOscillator();
alphaLeft.type = 'sine';
alphaLeft.frequency.setValueAtTime(randomParams.alphaFreq, audioCtx.currentTime);
const alphaLeftGain = audioCtx.createGain();
alphaLeftGain.gain.setValueAtTime(0.02, audioCtx.currentTime);
const alphaLeftPan = audioCtx.createStereoPanner();
alphaLeftPan.pan.setValueAtTime(-1, audioCtx.currentTime);
alphaLeft.connect(alphaLeftGain).connect(alphaLeftPan).connect(audioCtx.destination);
const alphaRight = audioCtx.createOscillator();
alphaRight.type = 'sine';
alphaRight.frequency.setValueAtTime(randomParams.alphaFreq + 10, audioCtx.currentTime);
const alphaRightGain = audioCtx.createGain();
alphaRightGain.gain.setValueAtTime(0.02, audioCtx.currentTime);
const alphaRightPan = audioCtx.createStereoPanner();
alphaRightPan.pan.setValueAtTime(1, audioCtx.currentTime);
alphaRight.connect(alphaRightGain).connect(alphaRightPan).connect(audioCtx.destination);
const betaLeft = audioCtx.createOscillator();
betaLeft.type = 'sine';
betaLeft.frequency.setValueAtTime(randomParams.betaFreq, audioCtx.currentTime);
const betaLeftGain = audioCtx.createGain();
betaLeftGain.gain.setValueAtTime(0.015, audioCtx.currentTime);
const betaLeftPan = audioCtx.createStereoPanner();
betaLeftPan.pan.setValueAtTime(-1, audioCtx.currentTime);
betaLeft.connect(betaLeftGain).connect(betaLeftPan).connect(audioCtx.destination);
const betaRight = audioCtx.createOscillator();
betaRight.type = 'sine';
betaRight.frequency.setValueAtTime(randomParams.betaFreq + 20, audioCtx.currentTime);
const betaRightGain = audioCtx.createGain();
betaRightGain.gain.setValueAtTime(0.015, audioCtx.currentTime);
const betaRightPan = audioCtx.createStereoPanner();
betaRightPan.pan.setValueAtTime(1, audioCtx.currentTime);
betaRight.connect(betaRightGain).connect(betaRightPan).connect(audioCtx.destination);
// Formant Synthesis: Vowel-like sounds (/a/)
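// Two sines near the first two formants of an /a/ vowel (F1 ≈ 700 Hz,
// F2 ≈ 1200 Hz), each routed through a high-Q bandpass filter at the same
// frequency; their gains stay at 0 until voice is detected and formant
// synthesis is enabled (see detectFormants).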
const formant1 = audioCtx.createOscillator();
formant1.type = 'sine';
formant1.frequency.setValueAtTime(randomParams.formant1Freq, audioCtx.currentTime);
const formant1Gain = audioCtx.createGain();
formant1Gain.gain.setValueAtTime(0, audioCtx.currentTime);
const formant1Filter = audioCtx.createBiquadFilter();
formant1Filter.type = 'bandpass';
formant1Filter.frequency.setValueAtTime(randomParams.formant1Freq, audioCtx.currentTime);
formant1Filter.Q.setValueAtTime(10, audioCtx.currentTime);
formant1.connect(formant1Gain).connect(formant1Filter).connect(audioCtx.destination);
const formant2 = audioCtx.createOscillator();
formant2.type = 'sine';
formant2.frequency.setValueAtTime(randomParams.formant2Freq, audioCtx.currentTime);
const formant2Gain = audioCtx.createGain();
formant2Gain.gain.setValueAtTime(0, audioCtx.currentTime);
const formant2Filter = audioCtx.createBiquadFilter();
formant2Filter.type = 'bandpass';
formant2Filter.frequency.setValueAtTime(randomParams.formant2Freq, audioCtx.currentTime);
formant2Filter.Q.setValueAtTime(10, audioCtx.currentTime);
formant2.connect(formant2Gain).connect(formant2Filter).connect(audioCtx.destination);
// Subtle Nature Sounds
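// Water: a looping 2 s white-noise buffer through a 400 Hz lowpass; forest:
// a quieter noise buffer through a 150 Hz lowpass; bird: a gated sine (below).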
const waterNoise = audioCtx.createBufferSource();
const bufferSize = audioCtx.sampleRate * 2;
const buffer = audioCtx.createBuffer(1, bufferSize, audioCtx.sampleRate);
const data = buffer.getChannelData(0);
for (let i = 0; i < bufferSize; i++) {
data[i] = Math.random() * 2 - 1;
}
waterNoise.buffer = buffer;
waterNoise.loop = true;
const waterFilter = audioCtx.createBiquadFilter();
waterFilter.type = 'lowpass';
waterFilter.frequency.setValueAtTime(400, audioCtx.currentTime);
const waterGain = audioCtx.createGain();
waterGain.gain.setValueAtTime(0.01, audioCtx.currentTime);
waterNoise.connect(waterFilter).connect(waterGain).connect(audioCtx.destination);
const birdOsc = audioCtx.createOscillator();
birdOsc.type = 'sine';
birdOsc.frequency.setValueAtTime(1000, audioCtx.currentTime);
const birdGain = audioCtx.createGain();
birdGain.gain.setValueAtTime(0, audioCtx.currentTime);
birdOsc.connect(birdGain).connect(audioCtx.destination);
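// Bird chirps: every 3 s pick a random 800-1200 Hz tone and open the gain
// for 100 ms.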
birdInterval = setInterval(() => {
const freq = 800 + Math.random() * 400;
birdOsc.frequency.setValueAtTime(freq, audioCtx.currentTime);
birdGain.gain.setValueAtTime(0.01, audioCtx.currentTime);
birdGain.gain.setValueAtTime(0, audioCtx.currentTime + 0.1);
}, 3000);
const forestNoise = audioCtx.createBufferSource();
const forestBuffer = audioCtx.createBuffer(1, bufferSize, audioCtx.sampleRate);
const forestData = forestBuffer.getChannelData(0);
for (let i = 0; i < bufferSize; i++) {
forestData[i] = Math.random() * 0.3 - 0.15;
}
forestNoise.buffer = forestBuffer;
forestNoise.loop = true;
const forestFilter = audioCtx.createBiquadFilter();
forestFilter.type = 'lowpass';
forestFilter.frequency.setValueAtTime(150, audioCtx.currentTime);
const forestGain = audioCtx.createGain();
forestGain.gain.setValueAtTime(0.01, audioCtx.currentTime);
forestNoise.connect(forestFilter).connect(forestGain).connect(audioCtx.destination);
oscillators = [deltaLeft, deltaRight, thetaLeft, thetaRight, alphaLeft, alphaRight, betaLeft, betaRight, formant1, formant2, waterNoise, birdOsc, forestNoise];
gainNodes = [deltaLeftGain, deltaRightGain, thetaLeftGain, thetaRightGain, alphaLeftGain, alphaRightGain, betaLeftGain, betaRightGain, formant1Gain, formant2Gain, waterGain, birdGain, forestGain];
formantFilters = [formant1Filter, formant2Filter];
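// Index map used by detectFormants/toggleVoiceDetection: 0-7 = binaural pairs
// (delta, theta, alpha, beta; left/right), 8-9 = formants, 10 = water,
// 11 = bird, 12 = forest.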
}
// Formant-based voice detection (mid-range focus)
function setupVoiceDetection(stream) {
analyser = audioCtx.createAnalyser();
analyser.fftSize = 256;
const source = audioCtx.createMediaStreamSource(stream);
source.connect(analyser);
const dataArray = new Float32Array(analyser.frequencyBinCount);
const sampleRate = audioCtx.sampleRate;
const binWidth = sampleRate / analyser.fftSize;
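// Each FFT bin i covers frequencies near i * (sampleRate / fftSize): with
// fftSize = 256 at 44.1 kHz that is ~172 Hz per bin, so the 800-3000 Hz voice
// band spans roughly bins 5-17. getFloatFrequencyData returns decibel values
// (default range about -100 to -30 dB), shifted and scaled below into a
// 0..1 formantIntensity.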
function detectFormants() {
analyser.getFloatFrequencyData(dataArray);
let midRangeIntensity = 0;
for (let i = 0; i < dataArray.length; i++) {
const freq = i * binWidth;
if (freq >= 800 && freq <= 3000) {
midRangeIntensity = Math.max(midRangeIntensity, dataArray[i] > -100 ? dataArray[i] + 100 : 0);
}
}
formantIntensity = Math.min(midRangeIntensity / 80, 1);
if (voiceActive && audioPlaying) {
gainNodes[0].gain.setValueAtTime(0.04 + formantIntensity * 0.04, audioCtx.currentTime);
gainNodes[2].gain.setValueAtTime(0.03 + formantIntensity * 0.03, audioCtx.currentTime);
gainNodes[4].gain.setValueAtTime(0.02 + formantIntensity * 0.02, audioCtx.currentTime);
gainNodes[6].gain.setValueAtTime(0.015 + formantIntensity * 0.015, audioCtx.currentTime);
gainNodes[8].gain.setValueAtTime(formantSynthesisActive ? formantIntensity * 0.05 : 0, audioCtx.currentTime);
gainNodes[9].gain.setValueAtTime(formantSynthesisActive ? formantIntensity * 0.05 : 0, audioCtx.currentTime);
gainNodes[10].gain.setValueAtTime(0.01 + formantIntensity * 0.01, audioCtx.currentTime);
gainNodes[11].gain.setValueAtTime(0.01 + formantIntensity * 0.01, audioCtx.currentTime);
gainNodes[12].gain.setValueAtTime(0.01 + formantIntensity * 0.01, audioCtx.currentTime);
}
updateButtonStates();
requestAnimationFrame(detectFormants);
}
detectFormants();
}
// Camera intensity detection: the Hydra canvas uses a WebGL context, so its
// pixels cannot be read with getContext('2d'). Instead, draw the webcam
// stream onto a small offscreen 2D canvas and measure average brightness.
function setupCamera(stream) {
try {
s0.initCam();
const video = document.createElement('video');
video.srcObject = stream;
video.muted = true;
video.playsInline = true;
video.play().catch(e => console.error("Video playback failed:", e));
const sampleCanvas = document.createElement('canvas');
sampleCanvas.width = 64;
sampleCanvas.height = 48;
const sampleCtx = sampleCanvas.getContext('2d');
setInterval(() => {
if (video.readyState < 2) return; // wait until the video has frame data
sampleCtx.drawImage(video, 0, 0, sampleCanvas.width, sampleCanvas.height);
const frame = sampleCtx.getImageData(0, 0, sampleCanvas.width, sampleCanvas.height);
let brightness = 0;
for (let i = 0; i < frame.data.length; i += 4) {
brightness += (frame.data[i] + frame.data[i + 1] + frame.data[i + 2]) / 3;
}
camIntensity = Math.min(brightness / (frame.data.length / 4) / 255, 1);
updateButtonStates();
}, 100);
} catch (e) {
console.error("Camera initialization failed:", e);
camIntensity = 0;
}
}
// T9-like nonsense word generator tied to OSC EEG rhythms
function generateNonsenseWord() {
const vowels = ['a', 'e', 'i', 'o', 'u'];
const consonants = ['b', 'd', 'g', 'k', 'm', 'n', 'p', 's', 't', 'w', 'z'];
let word = '';
const length = Math.floor(2 + Math.random() * 3);
for (let i = 0; i < length; i++) {
word += consonants[Math.floor(Math.random() * consonants.length)];
word += vowels[Math.floor(Math.random() * vowels.length)];
}
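// betaFreq (~250-270 Hz) divided by 270 gives a ~0.93-1.0 chance of appending
// an extra trailing vowel, so higher "beta" settings make slightly longer words.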
const oscInfluence = randomParams.betaFreq / 270;
if (Math.random() < oscInfluence) word += '-' + vowels[Math.floor(Math.random() * vowels.length)];
return word;
}
// Web Speech API with pause and thought
let speechInterval;
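// If voice or camera activity exceeds 0.5, the creature acknowledges it with
// "I sense <word>" and then pauses for 3 s (isPausedForThought) before
// speaking again; otherwise it simply utters a nonsense word.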
function speakNonsense() {
if (!speechSynthesisActive || isPausedForThought) return;
if (formantIntensity > 0.5 || camIntensity > 0.5) {
isPausedForThought = true;
speechSynthesis.cancel();
const word = generateNonsenseWord();
const utterance = new SpeechSynthesisUtterance(`I sense ${word}`);
utterance.pitch = 0.8 + formantIntensity * 0.4;
utterance.volume = 0.5 + formantIntensity * 0.3;
utterance.rate = 0.8 + (randomParams.betaFreq - 250) / 20 * 0.4;
utterance.onend = () => {
setTimeout(() => {
isPausedForThought = false;
}, 3000);
};
speechSynthesis.speak(utterance);
} else {
const utterance = new SpeechSynthesisUtterance(generateNonsenseWord());
utterance.pitch = 0.8 + formantIntensity * 0.4;
utterance.volume = 0.5 + formantIntensity * 0.3;
utterance.rate = 0.8 + (randomParams.betaFreq - 250) / 20 * 0.4;
speechSynthesis.speak(utterance);
}
}
function toggleSpeechSynthesis() {
speechSynthesisActive = !speechSynthesisActive;
if (speechSynthesisActive) {
speakNonsense();
speechInterval = setInterval(speakNonsense, 2000);
} else {
clearInterval(speechInterval);
speechSynthesis.cancel();
isPausedForThought = false;
}
updateButtonStates();
}
// Update button states
function updateButtonStates() {
document.getElementById('btn-audio').className = audioPlaying ? 'active pulsing' : '';
document.getElementById('btn-voice').className = voiceActive ? 'active pulsing' : '';
document.getElementById('btn-formant').className = formantSynthesisActive ? 'active pulsing' : '';
document.getElementById('btn-speech').className = speechSynthesisActive ? 'active pulsing' : '';
document.getElementById('btn-randomize').className = formantIntensity > 0.3 || camIntensity > 0.3 ? 'active pulsing' : '';
document.getElementById('btn-dream').className = formantIntensity > 0.3 || camIntensity > 0.3 ? 'active pulsing' : '';
document.getElementById('btn-osc1').className = formantIntensity > 0.3 || camIntensity > 0.3 ? 'active pulsing' : '';
document.getElementById('btn-osc2').className = formantIntensity > 0.3 || camIntensity > 0.3 ? 'active pulsing' : '';
document.getElementById('btn-osc3').className = formantIntensity > 0.3 || camIntensity > 0.3 ? 'active pulsing' : '';
document.getElementById('btn-instructions').className = document.getElementById('instructions').style.display === 'block' ? 'active' : '';
}
function randomizeParameters() {
randomParams = {
deltaFreq: 190 + Math.random() * 20,
thetaFreq: 210 + Math.random() * 20,
alphaFreq: 230 + Math.random() * 20,
betaFreq: 250 + Math.random() * 20,
formant1Freq: 600 + Math.random() * 200,
formant2Freq: 1100 + Math.random() * 200,
oscFreqGlobe: 4 + Math.random() * 3,
oscFreqLasers: 7 + Math.random() * 3,
oscFreqEEG1: 3 + Math.random() * 2,
oscFreqEEG2: 0.5 + Math.random() * 1,
kaleidGlobe: Math.floor(2 + Math.random() * 2),
kaleidLasers: Math.floor(2 + Math.random() * 2),
kaleidPreset1: Math.floor(2 + Math.random() * 2),
kaleidPreset2: Math.floor(2 + Math.random() * 2),
kaleidPreset3: Math.floor(2 + Math.random() * 2),
colorR: Math.random() * 0.5,
colorG: Math.random() * 0.5 + 0.3,
colorB: Math.random() * 0.5 + 0.5
};
if (audioPlaying) {
oscillators[0].frequency.setValueAtTime(randomParams.deltaFreq, audioCtx.currentTime);
oscillators[1].frequency.setValueAtTime(randomParams.deltaFreq + 2, audioCtx.currentTime);
oscillators[2].frequency.setValueAtTime(randomParams.thetaFreq, audioCtx.currentTime);
oscillators[3].frequency.setValueAtTime(randomParams.thetaFreq + 6, audioCtx.currentTime);
oscillators[4].frequency.setValueAtTime(randomParams.alphaFreq, audioCtx.currentTime);
oscillators[5].frequency.setValueAtTime(randomParams.alphaFreq + 10, audioCtx.currentTime);
oscillators[6].frequency.setValueAtTime(randomParams.betaFreq, audioCtx.currentTime);
oscillators[7].frequency.setValueAtTime(randomParams.betaFreq + 20, audioCtx.currentTime);
oscillators[8].frequency.setValueAtTime(randomParams.formant1Freq, audioCtx.currentTime);
oscillators[9].frequency.setValueAtTime(randomParams.formant2Freq, audioCtx.currentTime);
formantFilters[0].frequency.setValueAtTime(randomParams.formant1Freq, audioCtx.currentTime);
formantFilters[1].frequency.setValueAtTime(randomParams.formant2Freq, audioCtx.currentTime);
}
insertDream();
updateButtonStates();
}
function toggleAudio() {
if (audioPlaying) {
oscillators.forEach(osc => osc.stop());
clearInterval(birdInterval); // stop the bird-chirp scheduler along with its oscillator
oscillators = [];
gainNodes = [];
formantFilters = [];
audioPlaying = false;
} else {
if (audioCtx.state === 'suspended') audioCtx.resume(); // resume the context if it started suspended (browser autoplay policy)
createBinauralAndSounds();
oscillators.forEach(osc => osc.start());
audioPlaying = true;
}
updateButtonStates();
}
function toggleVoiceDetection() {
voiceActive = !voiceActive;
if (!voiceActive) {
if (audioPlaying) {
gainNodes[0].gain.setValueAtTime(0.04, audioCtx.currentTime);
gainNodes[2].gain.setValueAtTime(0.03, audioCtx.currentTime);
gainNodes[4].gain.setValueAtTime(0.02, audioCtx.currentTime);
gainNodes[6].gain.setValueAtTime(0.015, audioCtx.currentTime);
gainNodes[8].gain.setValueAtTime(0, audioCtx.currentTime);
gainNodes[9].gain.setValueAtTime(0, audioCtx.currentTime);
gainNodes[10].gain.setValueAtTime(0.01, audioCtx.currentTime);
gainNodes[11].gain.setValueAtTime(0.01, audioCtx.currentTime);
gainNodes[12].gain.setValueAtTime(0.01, audioCtx.currentTime);
}
formantIntensity = 0;
}
updateButtonStates();
}
function toggleFormantSynthesis() {
formantSynthesisActive = !formantSynthesisActive;
if (!formantSynthesisActive && audioPlaying) {
gainNodes[8].gain.setValueAtTime(0, audioCtx.currentTime);
gainNodes[9].gain.setValueAtTime(0, audioCtx.currentTime);
}
updateButtonStates();
}
// Request microphone and camera access
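// Fallback chain: try microphone + camera; if that fails, try microphone only;
// if that also fails, run the visuals without any live input.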
navigator.mediaDevices.getUserMedia({ audio: true, video: true })
.then(stream => {
setupVoiceDetection(stream);
setupCamera(stream);
insertDream();
toggleAudio();
toggleVoiceDetection();
})
.catch(err => {
console.error("Media access failed:", err);
navigator.mediaDevices.getUserMedia({ audio: true })
.then(stream => {
setupVoiceDetection(stream);
insertDream();
toggleAudio();
toggleVoiceDetection();
})
.catch(err => {
console.error("Audio access failed:", err);
insertDream();
toggleAudio();
});
});
// Main neuro-fractal dream
function insertDream() {
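// Note: formantIntensity and camIntensity are interpolated into this string as
// snapshot values; the patch only picks up new values when insertDream is
// re-run (every 10 s via randomizeParameters, or from the buttons).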
terminal.value = `
// Rotating fractal globe
osc(${randomParams.oscFreqGlobe}, 0.1, 1)
.color(${randomParams.colorR}, ${randomParams.colorG}, ${randomParams.colorB}, 0.9)
.rotate(0, () => time * 0.02)
.scale(() => 1 + a.fft[0] * 0.2 + ${formantIntensity} * 0.15 + ${camIntensity} * 0.1)
.modulate(src(s0), 0.05)
.kaleid(${randomParams.kaleidGlobe})
.out(o0)
// Laser scanning
osc(${randomParams.oscFreqLasers}, 0.1, 1)
.color(1, 0, 0)
.modulate(noise(1, 0.05), 0.03)
.kaleid(${randomParams.kaleidLasers})
.scale(() => 1 + ${formantIntensity} * 0.1 + ${camIntensity} * 0.05)
.modulate(src(s0), 0.03)
.out(o1)
// First EEG: OSC, reacts to sound and voice
osc(${randomParams.oscFreqEEG1}, 0.1)
.scale(() => 1 + a.fft[0] * 0.3 + ${formantIntensity} * 0.2 + ${camIntensity} * 0.1)
.rotate(0, () => time * 0.01 + ${formantIntensity} * 0.01)
.color(0.5, 0.5, 1, 0.7)
.out(o2)
// Second EEG: modulates the first
src(o2)
.modulate(osc(${randomParams.oscFreqEEG2}, 0.05).add(() => a.fft[1] * 0.1 + ${formantIntensity} * 0.05 + ${camIntensity} * 0.05), 0.03)
.scale(1.1)
.color(0.7, 0.2, 1, 0.8)
.out(o3)
// Neural decoder
osc(6, 0.1)
.diff(osc(7, 0.1))
.posterize(2)
.colorama(0.05)
.scale(() => 1 + ${formantIntensity} * 0.1 + ${camIntensity} * 0.05)
.modulate(src(s0), 0.03)
.out(o4)
// Biometric readings: pulse
osc(3, 0, 0.5)
.color(1, 0, 0)
.scale(0.3, 0.04)
.scrollY(-0.4)
.out(o5)
// Biometric readings: neural activity
osc(4, 0.1, 0.5)
.color(0, 0.5, 1)
.scale(0.3, 0.04)
.scrollY(-0.45)
.out(o6)
// Torsion indicator
osc(2, 0.05)
.scale(() => 0.5 + Math.sin(time * 0.4) * 0.3, 1) // slow pulse; osc(...).x is not a numeric value
.color(0.8, 0.2, 1)
.scrollY(0.4)
.out(o7)
// Target coordinates
osc(3, 0.1)
.scale(0.15)
.scrollX(0.4)
.scrollY(-0.4)
.color(0, 1, 0)
.out(o8)
// Window for synthetic consciousness
shape(4, 0.3, 0.01)
.color(1, 1, 1, 0.5)
.modulate(osc(0.5).add(() => ${formantIntensity} * 0.05 + ${camIntensity} * 0.05), 0.01)
.out(o9)
// Environment map (radar)
osc(5, 0.2)
.color(0, 1, 0)
.scale(() => 0.2 + ${formantIntensity} * 0.1 + ${camIntensity} * 0.1)
.scrollX(0.35)
.scrollY(0.35)
.modulate(src(s0), 0.05)
.modulate(osc(10).rotate(0, 0.1), 0.05)
.out(o10)
// Spatial effects and final composition
src(o0)
.add(src(o1), 0.3)
.add(src(o3), 0.3)
.add(src(o4), 0.3)
.add(src(o10), 0.4)
.modulate(noise(1, 0.05), 0.02)
.add(shape(4, 0.2, 0.01).rotate(0, 0.01 + ${formantIntensity} * 0.01 + ${camIntensity} * 0.01), 0.2)
.add(src(o5), 0.6)
.add(src(o6), 0.6)
.add(src(o7), 0.7)
.add(src(o8), 0.7)
.add(src(o9), 0.5)
.out()
`;
eval(terminal.value);
terminal.style.display = 'block';
}
// OSC Preset 1: Slow waves
function insertOscPreset1() {
terminal.value = `
osc(${randomParams.oscFreqGlobe}, 0.1, 1)
.color(${randomParams.colorR}, ${randomParams.colorG}, ${randomParams.colorB})
.modulate(noise(1, 0.05), 0.05)
.scale(() => 1 + a.fft[0] * 0.2 + ${formantIntensity} * 0.1 + ${camIntensity} * 0.1)
.modulate(src(s0), 0.05)
.kaleid(${randomParams.kaleidPreset1})
.out()
`;
eval(terminal.value);
terminal.style.display = 'block';
}
// OSC Preset 2: Pulsing rhythm
function insertOscPreset2() {
terminal.value = `
osc(${randomParams.oscFreqLasers}, 0.15, 1)
.color(1, 0.2, 0.5)
.modulate(osc(2, 0.05), 0.05)
.scale(() => 1 + a.fft[1] * 0.2 + ${formantIntensity} * 0.1 + ${camIntensity} * 0.1)
.modulate(src(s0), 0.05)
.kaleid(${randomParams.kaleidPreset2})
.out()
`;
eval(terminal.value);
terminal.style.display = 'block';
}
// OSC Preset 3: Energetic burst
function insertOscPreset3() {
terminal.value = `
osc(${randomParams.oscFreqLasers + 2}, 0.2, 1)
.color(0.5, 1, 0.2)
.modulate(noise(1, 0.1), 0.1)
.scale(() => 1 + a.fft[2] * 0.3 + ${formantIntensity} * 0.1 + ${camIntensity} * 0.1)
.modulate(src(s0), 0.05)
.kaleid(${randomParams.kaleidPreset3})
.out()
`;
eval(terminal.value);
terminal.style.display = 'block';
}
// Periodic randomization
setInterval(randomizeParameters, 10000);
// insertDream, toggleAudio and toggleVoiceDetection run once media permissions
// resolve (see the getUserMedia handlers above); calling them again here would
// toggle audio and voice back off.
toggleFormantSynthesis();
</script>
</body>
</html>