Use of net.beadsproject.beads.core.AudioContext in project narchy by automenta: class LFO_Granulation_01, method main.
/**
 * Granular-synthesis demo: loads a sample, loops it through a
 * GranularSamplePlayer, and modulates both the grain size and the grain
 * interval with independent slow sine-wave LFOs, then plays audio for
 * 100 seconds.
 *
 * @param args unused
 */
public static void main(String[] args) {
    // instantiate the AudioContext
    AudioContext ac = new AudioContext();
    // load the source sample from a file
    Sample sourceSample = null;
    try {
        sourceSample = new Sample("/tmp/Vocal/wav/Laugh1.wav");
    } catch (Exception e) {
        /*
         * If the program exits with an error message here, it most likely
         * can't find the file or can't open it. Make sure it exists at the
         * path above, and that it is a 16-bit, 44.1kHz audio file (such
         * files can be created using Audacity).
         */
        System.out.println(e.getMessage());
        e.printStackTrace();
        System.exit(1);
    }
    // instantiate a GranularSamplePlayer over the loaded sample
    GranularSamplePlayer gsp = new GranularSamplePlayer(ac, sourceSample);
    // tell gsp to loop the file
    gsp.setLoopType(SamplePlayer.LoopType.LOOP_FORWARDS);
    // custom function converting a 0.03Hz sine LFO (range -1..1) into grain
    // duration values in the range 1..101 (milliseconds, per Beads convention)
    WavePlayer wpGrainDurationLFO = new WavePlayer(ac, 0.03f, WaveFactory.SINE);
    FuncGen grainDurationLFO = new FuncGen(wpGrainDurationLFO) {
        @Override
        public float floatValueOf(float[] x) {
            return 1.0f + ((x[0] + 1.0f) * 50.0f);
        }
    };
    // set the grain size to the LFO
    gsp.setGrainSize(grainDurationLFO);
    // custom function converting a 0.02Hz sine LFO into grain interval
    // values, same 1..101 mapping as the duration LFO above
    WavePlayer wpGrainIntervalLFO = new WavePlayer(ac, 0.02f, WaveFactory.SINE);
    FuncGen grainIntervalLFO = new FuncGen(wpGrainIntervalLFO) {
        @Override
        public float floatValueOf(float[] x) {
            return 1.0f + ((x[0] + 1.0f) * 50.0f);
        }
    };
    // set the grain interval to the LFO
    // (fixed comment: previous text wrongly said "grain size")
    gsp.setGrainInterval(grainIntervalLFO);
    // tell gsp to behave somewhat randomly
    gsp.setRandomness(new Static(ac, 10.0f));
    // set up a gain and feed the granulated signal into it
    Gain gain = new Gain(ac, 1, 0.5f);
    gain.in(gsp);
    // connect the Gain to the AudioContext's output
    ac.out.in(gain);
    // begin audio processing and let it run for 100 seconds
    ac.start();
    Util.sleep((100 * 1000));
}
Use of net.beadsproject.beads.core.AudioContext in project narchy by automenta: class arpeggiator_01, method setup.
// construct the synthesizer
/**
 * Constructs the arpeggiator synthesizer: a square-wave oscillator whose
 * frequency is stepped through four multiples of the base {@code frequency}
 * (1x..4x) by a clock-driven FuncGen, shaped by a gain envelope. Triggers
 * one note and runs for 100 seconds.
 */
public void setup() {
    AudioContext ac = new AudioContext();
    // the gain envelope; starts silent — keyDown() below presumably ramps
    // it up (TODO confirm against keyDown's implementation)
    gainEnvelope = new Envelope(ac, 0.0f);
    // custom function that arpeggiates the pitch: each clock tick advances
    // 'tick' through 0..3, so the emitted value cycles through
    // frequency * 1, * 2, * 3, * 4
    arpeggiator = new FuncGen(gainEnvelope) {
        @Override
        public float floatValueOf(float[] anObject) {
            return frequency * (1 + tick);
        }
        @Override
        public void on(Auvent msg) {
            tick++;
            if (tick >= 4)
                tick = 0;
        }
    };
    // add arpeggiator as a dependent to the AudioContext
    // NOTE(review): this calls ac.out(arpeggiator) while the clock below is
    // registered via ac.out.dependsOn(beatClock) — confirm ac.out(x) is an
    // equivalent dependency-registering overload and not a typo.
    ac.out(arpeggiator);
    // the square-wave generator, frequency-controlled by the arpeggiator
    square = new WavePlayer(ac, arpeggiator, WaveFactory.SQUARE);
    // a clock to keep time: 500ms interval, 4 ticks per beat, driving the
    // arpeggiator's on() callback
    beatClock = new Clock(ac, 500.0f);
    beatClock.setTicksPerBeat(4);
    beatClock.on(arpeggiator);
    ac.out.dependsOn(beatClock);
    // Gain stage shaped by the envelope, fed by the square wave, routed to
    // the main output
    gain = new Gain(ac, 1, gainEnvelope);
    gain.in(square);
    ac.out.in(gain);
    // trigger note 79 (presumably a MIDI note number — verify against
    // keyDown), start the clock, and run for 100 seconds.
    // NOTE(review): ac.start() is never called in this method — confirm the
    // AudioContext is started elsewhere, otherwise no audio will play.
    keyDown(79);
    beatClock.start();
    Util.sleep(100000L);
}
Use of net.beadsproject.beads.core.AudioContext in project narchy by automenta: class NAgentX, method runRT.
/**
 * Builds and starts a real-time NAR reasoner hosting an agent.
 * <p>
 * Configures a real-time clock sized to the reasoner frame rate, a
 * multi-threaded executor, derivers, a Caffeine-backed concept index, and
 * various truth/priority parameters; then constructs the agent via
 * {@code init}, attaches clustering and introspection plugins, opens the
 * UI, and starts both the agent loop and the reasoner loop.
 *
 * @param init     factory producing the agent for the configured NAR
 * @param narFPS   reasoner frames per second (also used as clock FPS)
 * @param agentFPS agent decision frames per second
 * @return the running NAR instance
 */
public static NAR runRT(Function<NAR, NAgent> init, float narFPS, float agentFPS) {
    float clockFPS = narFPS;
    // nyquist threshold between decisecond (0.1) and centisecond (0.01)
    // clock resolution: fast clocks get centisecond granularity
    RealTime clock = clockFPS >= 10 / 2f ?
            new RealTime.CS(true) : new RealTime.DSHalf(true);
    clock.durFPS(clockFPS);
    // assemble the reasoner: worker-pool executor with a revaluating focus,
    // layered derivers, and a Caffeine-backed index weighted by term
    // voluplexity (volume/complexity measure)
    NAR n = new NARS().exe(new WorkerMultiExec(
            new Focus.AERevaluator(new XoRoShiRo128PlusRandom(1)), 256, 8192) {
        {
            Util.setExecutor(this);
        }
    }).time(clock).deriverAdd(1, 1).deriverAdd(2, 2).deriverAdd(3, 3).deriverAdd(5, 5).deriverAdd(6, 8).deriverAdd("motivation.nal").index(new CaffeineIndex(
            2500 * 1024,
            c -> {
                return (int) Math.ceil(c.voluplexity());
            })).get();
    // temporal and truth resolution parameters
    n.dtMergeOrChoose.set(true);
    n.dtDither.set(1f);
    n.confMin.set(0.01f);
    n.freqResolution.set(0.01f);
    n.termVolumeMax.set(40);
    n.beliefConfDefault.set(0.9f);
    n.goalConfDefault.set(0.9f);
    // scale all default task priorities down uniformly
    float priFactor = 0.2f;
    n.beliefPriDefault.set(priFactor);
    n.goalPriDefault.set(priFactor);
    n.questionPriDefault.set(priFactor);
    n.questPriDefault.set(priFactor);
    n.activationRate.set(0.5f);
    // construct the agent within the configured reasoner
    NAgent a = init.apply(n);
    // plugins; constructed for their side effects — they presumably
    // register themselves with the NAR on construction (TODO confirm)
    new ConjClustering(n, BELIEF, (t -> true), 8, 64);
    new Inperience(n, 12);
    // get ready: reclaim construction garbage before the loops start
    System.gc();
    n.runLater(() -> {
        chart(a);
        SpaceGraph.window(Vis.top(a.nar()), 800, 800);
        n.on(a);
        // START AGENT
        a.runFPS(agentFPS);
    });
    n.startFPS(narFPS);
    return n;
}
Use of net.beadsproject.beads.core.AudioContext in project narchy by automenta: class drum_machine_01, method setup.
// construct the synthesizer
/**
 * Constructs a two-voice drum synthesizer and runs it for 100 seconds:
 * a kick voice (100Hz sine through a 500Hz Bessel low-pass) and a snare
 * voice (noise plus a 200Hz sine through a 2.5kHz band-pass), each shaped
 * by its own gain envelope and routed to the main output.
 */
public void setup() {
    AudioContext ac = new AudioContext();
    // --- kick voice ---
    // envelope for kick gain; starts silent until triggered
    kickGainEnvelope = new Envelope(ac, 0.0f);
    // the kick oscillator: a 100Hz sine
    kick = new WavePlayer(ac, 100.0f, WaveFactory.SINE);
    // low-pass filter to soften the kick's upper harmonics
    kickFilter = new BiquadFilter(ac, BiquadFilter.BESSEL_LP, 500.0f, 1.0f);
    kickFilter.in(kick);
    // gain stage shaped by the kick envelope
    kickGain = new Gain(ac, 1, kickGainEnvelope);
    kickGain.in(kickFilter);
    // connect the kick chain to the main out
    ac.out.in(kickGain);
    // --- snare voice ---
    // envelope for snare gain; starts silent until triggered
    snareGainEnvelope = new Envelope(ac, 0.0f);
    // the snare sources: broadband noise plus a 200Hz sine body
    snareNoise = new WavePlayer(ac, 1.0f, WaveFactory.NOISE);
    snareTone = new WavePlayer(ac, 200.0f, WaveFactory.SINE);
    // band-pass filter mixing both sources around 2.5kHz
    snareFilter = new BiquadFilter(ac, BiquadFilter.BP_SKIRT, 2500.0f, 1.0f);
    snareFilter.in(snareNoise);
    snareFilter.in(snareTone);
    // gain stage shaped by the snare envelope
    snareGain = new Gain(ac, 1, snareGainEnvelope);
    snareGain.in(snareFilter);
    // connect the snare chain to the main out
    ac.out.in(snareGain);
    // begin audio processing and let it run for 100 seconds
    ac.start();
    Util.sleep(100000L);
}
Aggregations