(
// helper funcs
var hsArrayToCents, pDist, hdSum, hsChordalDistance, hsArrayToFreq;

// score funcs
var isInRange, spacingScore, rangeScore, intervalScore, inclusionScore;

// subroutines
var genTuples, initVoices, genOrders, genSubMotif, updateVoices, genDurFunc, genStepFunc;

// primary routines
var genMotif, genSecondarySeq;

// audition funcs
var genPatterns, genMidiPatterns;

// resource management funcs
var seedFunc, genUID, writeResources, stringifyToDepth, setSeeds, sanityCheck,
msgInterpret, loadLedgerFile, loadLedgerJSON, loadModelFile, loadModelJSON,
setGlobalVars, globalVarsToDict, saveLedger;

// model vars
// (model and global vars are mostly set by the OSC funcs below)
var seq, lastXChanges,
curUID, refUID, orderSeed, durSeed, motifSeed,
entrancesProbVals, passagesProbVals, exitsProbVals,
ranges, stepProbsVals, passagesWeights, hdExp, hdInvert,
orders, susWeights, orderSize, passagesSize,
motifEdited, orderEdited;

// model aux vars
var entrancesDurFunc, passagesDurFunc, exitsDurFunc, stepFunc;

// other global vars
var popSize, exPath, dir, primes, dims, tuples,
group, player, resourceDir, ledgerPath, ledger, currentlyPlayingUID,
nameSpaces;

// install JSON quark (not used)
/*
if(Quarks.isInstalled("JSONlib").not, {
	Quarks.install("https://github.com/musikinformatik/JSONlib.git");
	thisProcess.recompile;
	//HelpBrowser.openHelpFor("Classes/JSONlib");
});
*/
//------helper funcs

// harmonic-space array (one exponent per prime ratio) -> pitch distance in cents
hsArrayToCents = {
	arg hsArray;
	hsArray.collect({arg dist, p; dist * 1200 * log2(primes[p][0]/primes[p][1])}).sum
};

// signed or absolute distance (in cents) between two harmonic-space arrays
pDist = {
	arg array1, array2, signed = false;
	var pDistance;
	pDistance = hsArrayToCents.value(array1) - hsArrayToCents.value(array2);
	if(signed, {pDistance}, {abs(pDistance)})
};

// summed pairwise harmonic distance of a chord (an array of harmonic-space arrays)
hdSum = {
	arg hsArrays;
	var size, distances, mean;
	size = hsArrays.size;
	distances = (size - 1).collect({arg i;
		((i + 1)..(size - 1)).collect({arg j;
			abs(hsArrays[i] - hsArrays[j]).collect({arg dist, p; dist * log2(primes[p].product)}).sum
		});
	}).flat;
	mean = distances.sum / distances.size;
	distances.sum
	//mean + ((1 / sqrt((pow(distances - mean, 2)).sum / distances.size)) * mean)
};

// harmonic distance between two chords, summed over all voice pairings
hsChordalDistance = {
	arg hsArrays1, hsArrays2;
	var size, distances, mean;
	size = hsArrays1.size;
	distances = hsArrays1.size.collect({arg i;
		hsArrays2.size.collect({arg j;
			abs(hsArrays1[i] - hsArrays2[j]).collect({arg dist, p; dist * log2(primes[p].product)}).sum
		});
	}).flat;
	mean = distances.sum / distances.size;
	distances.sum
	//mean + ((1 / sqrt((pow(distances - mean, 2)).sum / distances.size)) * mean)
};

// harmonic-space array -> frequency ratio relative to 1/1
hsArrayToFreq = {
	arg array;
	array.collect({arg dim, d; pow(primes[d][0]/primes[d][1], dim)}).product
};
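
// quick reference for the helper funcs above; a usage sketch (illustrative values only,
// relying on the primes list defined near the bottom of this file):
/*
hsArrayToCents.value([1, 0, 0, 0, 0, 0]).postln;   // -> 1200.0 (one octave, 2/1)
hsArrayToCents.value([0, 1, 0, 0, 0, 0]).postln;   // -> ~701.96 (just fifth, 3/2)
hsArrayToFreq.value([0, 1, 0, 0, 0, 0]).postln;    // -> 1.5
pDist.value([1, 0, 0, 0, 0, 0], [0, 1, 0, 0, 0, 0]).postln;  // -> ~498.04 (a 4/3 apart)
*/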

//------score funcs

/*
isInRange = {
	arg hsArray, min, max;
	var cents;
	cents = hsArrayToCents.value(hsArray);
	(cents >= min) && (cents <= max)
};
*/

// rewards registral spacing of at least min cents between adjacent voices
spacingScore = {
	arg hsArrays, min;
	var centsArray;
	centsArray = hsArrays.collect({arg hsArray; hsArrayToCents.value(hsArray)}).sort({arg a, b; a < b});
	centsArray.differentiate.drop(1).collect({arg pDistance; if(pDistance >= min, {1}, {0.01})}).sum;
};

// 1 if the distance between two pitches falls in [min, max], otherwise the penalty value low
rangeScore = {
	arg hsArray1, hsArray2, min, max, low, signed = false;
	var pDistance;
	pDistance = pDist.value(hsArray1, hsArray2, signed);
	if((pDistance >= min) && (pDistance <= max), {1}, {low});
};

// melodic step score, evaluated through the user-drawn stepFunc envelope
intervalScore = {
	arg hsArray1, hsArray2, mean, sd, signed = false;
	var pDistance;
	pDistance = pDist.value(hsArray1, hsArray2, signed);
	//pDistance.gaussCurve(1, mean, sd)
	stepFunc.value(pDistance);
};

// penalizes candidates that already occur in array (e.g. recently sounded chords)
inclusionScore = {
	arg array, test, min = 0.01;
	if(array.collect({arg v; v.hash}).includes(test.hash), {min}, {1});
};
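
// the score funcs are combined multiplicatively in updateVoices below; a usage sketch
// (illustrative values only):
/*
rangeScore.value([1, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], 0, 2400, 0.01).postln;  // -> 1 (1200 cents lies in 0..2400)
inclusionScore.value([[0, 0, 0, 0, 0, 0]], [0, 0, 0, 0, 0, 0]).postln;           // -> 0.01 (candidate already in the list)
*/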

//------subroutines

// candidate voice moves: a ±1 step on each non-octave prime (with no octave change),
// plus the same steps transposed by -3..3 octaves
genTuples = {
	var tuples;
	tuples = dims.collect({[-1, 0, 1]}).allTuples.select({arg tuple; (abs(tuple.drop(1)).sum <= 1) && (tuple[0] == 0)});
	tuples = tuples ++ tuples.collect({arg tuple; [-3, -2, -1, 1, 2, 3].collect({arg octTrans; tuple.deepCopy.put(0, octTrans)})}).flatten;
};

// all voices start on the unison (1/1)
initVoices = {
	var init, voicesInit;
	voicesInit = popSize.collect({dims.collect({0})});
	/*
	voicesInit = [dims.collect({0})];
	(popSize - 1).do({
		arg rep, new;
		rep = dims.rand;
		new = voicesInit.last.deepCopy;
		new[rep] = new[rep] + [-1, 1].choose();
		voicesInit = voicesInit.add(new);
	});
	*/
	voicesInit.deepCopy;
};
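
// sanity sketch (illustrative; assumes dims = 6 as set by the primes list below, and that
// popSize has already been set): the base set is the unison plus a ±1 step on each non-octave
// prime (11 tuples), each also offered at -3..3 nonzero octave transpositions:
/*
genTuples.value().size.postln;  // -> 77
initVoices.value().postln;      // popSize copies of [0, 0, 0, 0, 0, 0] (all voices on 1/1)
*/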

// build a seeded duration generator: durations are drawn from the probability envelope envData,
// scaled to [minDur, maxDur] and quantized to eighths; when a chord is allowed, a duration of 0
// (simultaneity) occurs with probability chordProb; pad extends the result by minPad..maxPad
genDurFunc = {arg chordProb, minPad, maxPad, minDur, maxDur, envData, seed;
	var env, pTable, durFunc;
	env = Env.pairs([[0, 0]] ++ envData.clump(2) ++ [[1, 0]]).asSignal(256).asList.asArray;
	pTable = env.asRandomTable;
	[chordProb, minPad, maxPad, minDur, maxDur, envData].postln;
	durFunc = {arg allowChord, pad = false;
		var res;
		res = if(allowChord.not, {
			pTable.tableRand * (maxDur - minDur) + minDur
		}, {
			if(1.0.rand < chordProb, {0}, {pTable.tableRand * (maxDur - minDur) + minDur});
		}).round(0.125);
		if(pad, {res = res + rrand(minPad.asFloat, maxPad.asFloat).round(0.125)});
		if(res.asInteger == res, {res = res.asInteger});
		res
	};
	seedFunc.value(durFunc, seed);
};
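
// usage sketch (hypothetical parameter values, for illustration only): build a seeded duration
// generator and draw a few reproducible, eighth-quantized durations from it:
/*
~testDurFunc = genDurFunc.value(0.5, 0.25, 1, 0.25, 2, [0.5, 1], 12345);
4.collect({~testDurFunc.value(true)}).postln;
*/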

// build a seeded step-preference function: maps a melodic distance (in cents) to a weight,
// via the user-drawn envelope envData normalized to [minStep, maxStep]
genStepFunc = {arg minStep, maxStep, envData, seed;
	var envDataNorm, env, pTable, stepFunc;
	[minStep, maxStep, envData].postln;
	envDataNorm = ([[0, 0]] ++ envData.clump(2) ++ [[1, 0]]).flop;
	envDataNorm = [envDataNorm[0].normalize(minStep, maxStep), envDataNorm[1]].flop;
	env = Env.pairs(envDataNorm);
	stepFunc = {arg pDist;
		env.at(pDist).clip(0.001, 1);
	};
	seedFunc.value(stepFunc, seed);
};

// generate the orders for a motif: for each sub-motif, partition the voices into
// [sustaining, progressing, silent] instruments and decide the order of progression
genOrders = {arg minMotifLength = 1, maxMotifLength = 5, minProgLength = 0, maxProgLength = 5;
	((maxMotifLength - minMotifLength).rand + minMotifLength).collect({
		var noProgIns, noSusIns, noSilentIns, prog, sus, silent, order;
		noSusIns = [1, 2, 3].wchoose(susWeights.normalizeSum);
		noProgIns = (popSize - noSusIns).rand + 1;
		noSilentIns = popSize - noSusIns - noProgIns;

		# prog, sus, silent = (0..(popSize-1)).scramble.clumps([noProgIns, noSusIns, noSilentIns]);

		prog = (prog.scramble ++ ((maxProgLength - minProgLength).rand + minProgLength).collect({prog.choose}).scramble);
		if(silent == nil, {silent = []});
		[sus.scramble, prog, silent.scramble]
	});
};
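
// shape of a generated order (descriptive sketch, assuming popSize = 4): each entry is
// [sustaining, progressing, silent] instrument indices, e.g.
// [[3], [0, 2, 0], [1]] -> instrument 3 sustains, 0 and 2 progress (0 moving twice), 1 is silent.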

// move one voice (ins): score every candidate step from the current chord, weight the scores
// by passagesWeights, and choose the new pitch by weighted random selection
updateVoices = {arg ins, sus;
	var voices, candidates, nWeights, nProbs, sel;

	voices = lastXChanges.deepCopy.last;

	candidates = sus.collect({arg v; tuples.collect({arg t; voices[v] + t})}).flatten;
	candidates = difference(candidates.asSet, voices.asSet).asList;
	nProbs = candidates.collect({arg candidate;
		var stepScore, recentlySoundedScore, isInRangeScore, regScore, hdScore;

		//stepScore = intervalScore.value(voices[ins], candidate, 30, 400, 0.1);
		stepScore = intervalScore.value(voices[ins], candidate, 100, 100);
		recentlySoundedScore = inclusionScore.value(lastXChanges.flop[ins], candidate, 0);
		isInRangeScore = rangeScore.value(candidate, candidate.collect({0}), ranges[ins][0], ranges[ins][1], 0, true);
		regScore = spacingScore.value(voices.deepCopy.put(ins, candidate), 300);
		hdScore = pow(hdSum.value(voices.deepCopy.put(ins, candidate)), hdExp);
		if(hdInvert == 0, {hdScore = 1/hdScore});
		//maybe what you want here is a vector to another root and then favoring movement towards it.
		//distScore = pow(hsChordalDistance.value(voices, voices.put(ins, candidate)), 2);

		[stepScore, recentlySoundedScore, isInRangeScore, regScore, hdScore]
	});

	nWeights = passagesWeights;

	//this handles nWeights of 0; mainly for testing
	nProbs = nProbs.flop.select({arg scores, s; nWeights[s] != 0}).flop;
	nWeights = nWeights.select({arg weight; weight != 0});
	nProbs = nProbs.flop.collect({arg scores, s;
		if(scores.sum == 0, {scores}, {scores.normalizeSum * nWeights[s]})
	});
	nProbs = nProbs.flop.collect({arg scores, s; scores.product}).normalizeSum;

	sel = candidates.wchoose(nProbs);

	voices[ins] = sel;
	lastXChanges = lastXChanges.add(voices).keep(-5);
};
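
// selection sketch (descriptive): per candidate, the five scores above are normalized per
// criterion, scaled by the matching entry of passagesWeights, and multiplied together to form
// that candidate's probability; a weight of 0 (e.g. passagesWeights = [1, 1, 1, 1, 0]) drops
// the corresponding criterion from the product entirely.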

// generate one sub-motif: walk the flattened order (silent, sustaining, then progressing voices),
// move the progressing voices, and collect [chordState, duration] pairs
genSubMotif = {arg order, orderIndex, lastState, repeatLast = false, startFromLast = false, isLastOrder = false;
	var sus, prog, silent, flatOrder, res, isInChord, allowChord, pad, lastXChangesHold, voices, adder;
	# sus, prog, silent = order;
	flatOrder = silent ++ sus ++ prog;
	lastXChangesHold = lastXChanges.deepCopy;
	voices = lastState.deepCopy;
	isInChord = popSize.collect({false});
	allowChord = false;
	pad = false;
	res = [];
	"------generating motif".postln;
	//need to figure out here if voices move between motifs
	flatOrder.do({arg ins, i;

		if(prog.includes(ins) && repeatLast.not, {updateVoices.value(ins, sus)});
		adder = if(silent.includes(ins), {["Rest"]}, {lastXChanges.last.deepCopy[ins]});

		if(voices[ins] != adder, {
			var dur;

			if((sus ++ silent).includes(ins), {
				allowChord = (ins != sus.last);
				pad = (ins == sus.last);
			}, {
				if(i < (flatOrder.size - 1), {
					allowChord = (isInChord[flatOrder[i + 1]] || (ins == flatOrder[i + 1])).not;
					pad = false;
				}, {
					allowChord = false;
					pad = true
				});
			});
			if((orderIndex == 0) && sus.includes(ins), {
				dur = entrancesDurFunc.value(allowChord, pad);
			}, {
				dur = passagesDurFunc.value(allowChord, pad);
			});
			if(dur == 0, {isInChord[ins] = true}, {isInChord = popSize.collect({false})});

			voices[ins] = adder;
			res = res.add([voices.deepCopy.postln, dur]);
		});
	});

	// pad ending
	if(orderIndex == (orders.size - 1), {
		(0..(popSize-1)).scramble.do({arg ins;
			if(res.last.first[ins] != ["Rest"], {
				var dur;
				voices[ins] = ["Rest"];
				allowChord = (voices != popSize.collect({["Rest"]}));
				pad = allowChord.not;
				dur = exitsDurFunc.value(allowChord, pad);
				res = res.add([voices.deepCopy.postln, dur]);
			});
		});
	});

	//format and return
	if(startFromLast, {lastXChanges = lastXChangesHold.deepCopy});
	res;
};

//------primary routines

// generate the full motif sequence: one sub-motif per order, then round the total duration
// up to a whole measure (4 beats)
genMotif = {
	var repeats, fSeq, fDur, durAdd;

	repeats = 1;
	fSeq = [];

	repeats.do({arg index;
		var motif;

		motif = [];

		orders.do({arg order, o;
			var lastState, subMotif;
			lastState = if(o == 0, {popSize.collect({["Rest"]})}, {motif.last.last.first});
			subMotif = genSubMotif.value(order, o, lastState, isLastOrder: o == (orders.size - 1));
			motif = motif.add(subMotif);
		});

		sanityCheck.value(motif, index);

		fSeq = fSeq.add(motif);
	});

	//round last duration up to the measure
	fDur = fSeq.flatten.flatten.slice(nil, 1).sum;
	durAdd = fDur.round(4) - fDur;
	if(durAdd < 0, {durAdd = durAdd + 4}); // pad up to the next full measure rather than shortening
	fSeq[0][orders.size - 1][fSeq[0][orders.size - 1].size - 1][1] = fSeq[0][orders.size - 1][fSeq[0][orders.size - 1].size - 1][1] + durAdd;

	fSeq
};

// group the sequence into small "curdles" (clumps of 1-3 motifs), append a short rest
// to each clump, and optionally repeat clumps
genSecondarySeq = {arg seq;
	var curdles, fSeq;
	curdles = [];
	while({curdles.sum < seq.size}, {curdles = curdles ++ [3.rand + 1]});

	fSeq = seq.clumps(curdles).collect({arg clump, m;
		var repeats, paddedSeq;

		//add rest
		paddedSeq = clump.add([[[popSize.collect({["Rest"]}), 0.5.rand]]]);

		//implement repeats
		repeats = [0.rand + 1, 1].wchoose([1, 0].normalizeSum);
		repeats.collect({paddedSeq});
	});
	fSeq
};

//------audition funcs

/*
Event.addEventType(\osc, {
	if (~addr.postln.notNil) {
		~addr.sendMsg(~indexPath, ~indexMsg);
		~addr.sendMsg(~seqPath, stringifyToDepth.value(~seqMsg, 3));
		//~addr.sendMsg("/STATE/OPEN", (dir.replace("supercollider", "resources") +/+ ~idMsg +/+ ~idMsg ++ "_gui_state" ++ ".state").standardizePath.postln);
	};
});
*/

// custom event type for sending OSC messages from within a pattern
Event.addEventType(\osc, {
	if (~addr.notNil) {
		~msg;
		~addr.sendMsg(~path, *~msg);
	};
});
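
// usage sketch for the \osc event type (hypothetical address and payload, illustration only):
/*
(type: \osc, addr: NetAddr("127.0.0.1", 57120), path: "/playing", msg: [0]).play;  // sends "/playing 0"
*/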

// build the audition patterns: one string-model + sine Pbind pair per voice, plus (unless
// oneShot) an \osc Pbind that reports section boundaries back to the controlling client (addr)
genPatterns = {arg inSeq, addr, oneShot = false;
	var voices, durs, pbinds, res, indices, sectionDurs, msg, ids, seq;
	seq = inSeq.collect({arg mSeq; mSeq[0]});
	# voices, durs = seq.flatten2(seq.maxDepth - 5).flop;
	pbinds = voices.flop.collect({arg voice, v;
		var clumps, hdScores, freqs, fDurs, attacks, rels, amps;
		clumps = voice.separate({arg a, b; a != b });
		freqs = clumps.collect({arg clump; if(clump[0] != ["Rest"], {(60.midicps * hsArrayToFreq.value(clump[0]))}, {Rest(0)})});
		fDurs = durs.clumps(clumps.collect({arg clump; clump.size})).collect({arg clump; clump.sum});
		//attacks = 2.collect({rrand(1, 3)}) ++ freqs.drop(2).collect({rrand(0.3, 0.5)});
		attacks = fDurs.collect({arg dur; dur * rrand(0.2, 0.4)});
		//rels = freqs.drop(2).collect({rrand(0.3, 0.5)}) ++ 2.collect({rrand(1, 3)});
		rels = (clumps.size - 1).collect({arg c;
			if(clumps[c + 1][0] == ["Rest"], {rrand(1.0, 3.0)}, {rrand(0.3, 0.5)});
		});
		rels = rels.add(rrand(1.0, 3.0));
		amps = freqs.collect({rrand(0.6, 0.99)});

		[
			Pbind(
				\instrument, \string_model,
				\group, group,
				\freq, Pseq(freqs, 1),
				\dur, Pseq(fDurs, 1),
				\attack, Pseq(attacks, 1),
				\sustain, Pseq(fDurs, 1),
				\release, Pseq(rels, 1),
				//\amp, Pseq(amps, 1),
				\amp, Pbrown(0.5, 1, 0.5),
				\busIndex, v
			),
			Pbind(
				\instrument, \sine,
				\group, group,
				\freq, Pseq(freqs, 1),
				\dur, Pseq(fDurs, 1),
				\sustain, Pseq(fDurs, 1),
				\busIndex, v
			)
		]
	}).flatten;
	if(oneShot.not, {
		msg = inSeq.collect({arg mSeq, m; mSeq[1..]});
		//ids = inSeq.collect({arg mSeq, m; mSeq[2]});
		sectionDurs = seq.collect({arg mSeq; mSeq.flatten2(mSeq.maxDepth - 5).flop[1].sum});
		pbinds = pbinds ++
		[
			Pbind(
				\type, \osc,
				\addr, addr,
				\path, "/playing",
				\msg, Pseq(msg, 1),
				\dur, Pseq(sectionDurs, 1)
			)
		]
	});
	res = Ppar(pbinds);
	res
};

// MIDI audition (currently unused)
/*
genMidiPatterns = {arg seq;
	var voices, durs, patterns, res, mOut, pbRange;
	pbRange = 1; //semitones - change this as needed for your situation
	mOut = MIDIOut.newByName("TiMidity", "TiMidity port 0").latency_(Server.default.latency);
	# voices, durs = seq.flatten2(seq.maxDepth - 5).flop;
	res = Ppar(
		voices.flop.collect({arg voice, v;
			var clumps, hdScores, freqs, fDurs;

			mOut.program(v, 70);

			clumps = voice.separate({arg a, b; a != b });
			freqs = clumps.collect({arg clump; if(clump[0] != ["Rest"], {(60.midicps * hsArrayToFreq.value(clump[0]))}, {Rest(0)})});
			fDurs = durs.clumps(clumps.collect({arg clump; clump.size})).collect({arg clump; clump.sum});

			Pbind(
				\type, \midi,
				\chan, v,
				\noteval, Pseq(freqs.cpsmidi - 24, 1),
				\note, Pfunc({ | event | event[\noteval].floor }),
				\dur, Pseq(fDurs, 1),
				\midiout, mOut,
				\amp, 1,
				\bend, Pfunc({
					| event |
					if (event[\note].isRest.not) {
						var pitchbendvalue = event[\noteval].frac.linlin(0, pbRange, 8192, 8192*2).asInteger;
						mOut.bend(v, pitchbendvalue);
					};
					0; // return something other than nil to avoid stopping the pattern
				}),
			);
		});
	);
	res
};
*/

//------resource management funcs

// unique ID for a committed model (hex of the current date/time)
genUID = {Date.seed.asHexString.toLower};

// wrap a function in a Routine with its own random seed, so repeated calls are reproducible
seedFunc = {arg func, seed;
	var funcArgs, next;
	next = Routine({loop{func.valueArray(funcArgs).yield }});
	next.randSeed_(seed);
	{arg ...args; funcArgs = args; next.value}
};
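
// usage sketch (hypothetical seed value, for illustration only): seedFunc returns a wrapper
// whose random results repeat exactly for a given seed:
/*
~seededRand = seedFunc.value({1000.rand}, 12345);
3.collect({~seededRand.value}).postln;  // the same three values every time this is rebuilt with 12345
*/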

// pretty-print nested arrays as JSON-ish text, indenting down to maxDepth levels and
// serializing anything deeper with asCompileString
stringifyToDepth = {arg data, maxDepth = 1;
	var prettyString = "", rCount = 0, writeArray, indent;

	if(maxDepth == 0, {
		data.asCompileString
	}, {
		indent = {arg size; size.collect({" "}).join("")};
		writeArray = {arg array;
			prettyString = prettyString ++ indent.value(rCount) ++ "[\n";
			rCount = rCount + 1;
			if(rCount < maxDepth, {
				array.do({arg subArray; writeArray.value(subArray)});
			}, {
				prettyString = prettyString ++ array.collect({arg subArray;
					indent.value(rCount + 1) ++ subArray.asCompileString
				}).join(",\n");
			});
			rCount = rCount - 1;
			prettyString = prettyString ++ "\n" ++ indent.value(rCount) ++ "],\n";
		};

		writeArray.value(data);
		prettyString.replace(",\n\n", "\n").drop(-2);
	})
};
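
// usage sketch (illustrative only): the outer array is spread across lines while each inner
// array stays serialized on a single line:
/*
stringifyToDepth.value([[1, 2], [3, 4]], 1).postln;
*/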

// debugging check: print each chord change of a motif and flag successive states that
// differ by more than one voice ("problem 1") or not at all ("problem 2")
sanityCheck = {arg motif, index;
	//print functions = very helpful
	("----------" + index + "------------").postln;

	motif.flatten.do({arg val, v;
		if(v > 0, {
			if(motif.flatten[v-1][0].hammingDistance(val[0]) > 1, {"problem 1".postln});
			if(motif.flatten[v-1][0].hammingDistance(val[0]) == 0, {"problem 2".postln});
		});
		val.postln
	});
	"***********".postln;
};

// convert a parsed JSON value (string or nested array of strings) back into SC values
msgInterpret = {arg in, escapeDoubleQuotes = true, escapeSingleQuotes = true;
	var res;

	res = in;
	if(res.isNil.not, {
		if((res.isArray && res.isString.not), {
			res = res.asCompileString;
			res = res.replace(" ", "").replace("\n", "").replace("\t", "");
			if(escapeSingleQuotes, {res = res.replace("\'", "")});
			if(escapeDoubleQuotes, {res = res.replace("\"", "")});
			res = res.replace("Rest", "\"Rest\"");
			res = res.interpret;
		}, {
			var tmpRes;
			if(res.every({arg char; char.isDecDigit}), {tmpRes = res.asInteger});
			if(res.contains("."), {tmpRes = res.asFloat});
			if(tmpRes != nil, {res = tmpRes});
		});
	});
	res
};

// serialize the current model dict to a JSON file at path, one namespace per entry
writeResources = {arg path, dict;
	var file, modelItems, resString;
	file = File(path, "w");

	modelItems = [
		seq, lastXChanges,
		curUID, refUID, orderSeed, durSeed, motifSeed,
		entrancesProbVals, passagesProbVals, exitsProbVals,
		ranges, stepProbsVals, passagesWeights, hdExp, hdInvert,
		orders, susWeights, orderSize, passagesSize,
		motifEdited, orderEdited
	];

	resString = nameSpaces.collect({arg nameSpace;
		var depth = 0, insert = " ";
		if(nameSpace == "music_data", {depth = 3; insert = "\n"});
		if(nameSpace == "last_changes", {depth = 1; insert = "\n"});
		if(nameSpace == "order", {depth = 1; insert = "\n"});
		if((nameSpace == "ref_uid") && (dict[nameSpace] == nil), {dict[nameSpace] = "nil"});
		"\"" ++ nameSpace ++ "\":" ++ insert ++ stringifyToDepth.value(dict[nameSpace], depth)
	}).join(",\n");

	resString = "{\n" ++ resString ++ "\n}";

	file.write(resString);
	file.close;
	resString
};

// read a model file from disk and interpret it into a Dictionary keyed by nameSpaces
loadModelFile = {arg path; loadModelJSON.value(File(path, "r").readAllString.parseJSON)};

loadModelJSON = {arg jsonObject;
	var dict;
	dict = Dictionary.with(*nameSpaces.collect({arg nS; nS->msgInterpret.value(jsonObject[nS])}));
	dict
};

// unpack a model dict into the global model vars (the destructuring assignment must match
// the order of nameSpaces)
setGlobalVars = {arg dict, skipLastXChanges = false;
	var tmpLastXChanges;
	tmpLastXChanges = lastXChanges.deepCopy;
	// order really matters!!!!
	# seq, lastXChanges, curUID, refUID, orderSeed, durSeed, motifSeed,
	entrancesProbVals, passagesProbVals, exitsProbVals,
	ranges, stepProbsVals, passagesWeights, hdExp, hdInvert,
	orders, susWeights, orderSize, passagesSize,
	motifEdited, orderEdited = nameSpaces.collect({arg nS; dict[nS]});
	if(skipLastXChanges, {lastXChanges = tmpLastXChanges});
	dict
};

// collect the global model vars back into a Dictionary keyed by nameSpaces
globalVarsToDict = {
	var modelItems, dict;
	// order really matters!!!!
	modelItems = [
		seq, lastXChanges,
		curUID, refUID, orderSeed, durSeed, motifSeed,
		entrancesProbVals, passagesProbVals, exitsProbVals,
		ranges, stepProbsVals, passagesWeights, hdExp, hdInvert,
		orders, susWeights, orderSize, passagesSize,
		motifEdited, orderEdited
	];
	dict = Dictionary.with(*nameSpaces.collect({arg nS, n; nS->modelItems[n]}));
};

// load a ledger JSON file; the matching resource directory is the path minus its extension
loadLedgerFile = {arg path;
	ledgerPath = path;
	resourceDir = path.splitext(".").drop(-1).join;
	loadLedgerJSON.value(File(ledgerPath, "r").readAllString.parseJSON)
};

loadLedgerJSON = {arg jsonObject; ledger = jsonObject["ledger"]};

// write the ledger to path and copy each referenced resource folder into the new resource dir
saveLedger = {arg ledger, path;
	var file, curResourceDir;
	file = File(path, "w");
	curResourceDir = resourceDir;
	resourceDir = path.splitext(".").drop(-1).join;
	File.mkdir(resourceDir);
	ledger.do({arg id;
		File.copy(curResourceDir +/+ id, resourceDir +/+ id);
	});
	file.write("{\n\"ledger\":\n" ++ stringifyToDepth.value(ledger, 1) ++ "\n}");
	file.close;
};

//------global vars

primes = [[2, 1], [3, 2], [5, 4], [7, 4], [11, 8], [13, 8]];
//ranges = [[-2400, 0], [-1200, 1200], [0, 2400], [0, 2400]];
exPath = thisProcess.nowExecutingPath;
dir = exPath.dirname;
//popSize = 4;
dims = primes.size;
tuples = genTuples.value();
//refUID = nil;
group = Group.new;
~group = group;
loadLedgerFile.value(dir +/+ ".." +/+ "resources" +/+ "piece_ledger.json");
resourceDir = (dir +/+ ".." +/+ "resources" +/+ "piece_ledger");
//passagesWeights = [1, 1, 1, 1, 1];
//susWeights = [1, 1, 1];

// order really matters!!!! (must match the destructuring in setGlobalVars and the
// modelItems arrays above)
nameSpaces = [
	"music_data", "last_changes", "cur_uid", "ref_uid", "order_seed", "dur_seed", "motifs_seed",
	"entrances_probs_vals", "passages_probs_vals", "exits_probs_vals",
	"ranges", "step_probs_vals", "passages_weights", "hd_exp", "hd_invert",
	"order", "sus_weights", "order_size", "passages_size",
	"motif_edited", "order_edited"
];

//------OSC funcs

OSCdef(\load_ledger, {arg msg, time, addr, port;
	loadLedgerFile.value(msg[1].asString);
}, \load_ledger);

OSCdef(\load_model, {arg msg, time, addr, port;
	var dict;
	dict = loadModelFile.value(msg[1].asString);
	setGlobalVars.value(dict);
}, \load_model);

OSCdef(\save_ledger, {arg msg, time, addr, port;
	msg.postln;
	ledger = msgInterpret.value(msg[1].asString.parseJSON["ledger"], false).postln;
	//loadLedgerJSON.value(msg[0])
	saveLedger.value(ledger, msg[2].asString);
	//loadLedgerFile.value(msg[1].asString);
}, \save_ledger);
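
// usage sketch (hypothetical paths; the companion GUI normally sends these messages):
/*
NetAddr.localAddr.sendMsg("/load_ledger", "/path/to/piece_ledger.json");
NetAddr.localAddr.sendMsg("/load_model", "/path/to/some_uid/some_uid_mus_model.json");
*/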

// generate a new motif from the model file at msg[1]: rebuild the seeded duration/step
// functions, generate orders if needed, run genMotif, and write the result back to the model file
OSCdef(\generate, {arg msg, time, addr, port;
	var path, dict, durSeeds, musPath, modelString;
	msg.postln;

	path = msg[1].asString;

	dict = loadModelFile.value(path);
	setGlobalVars.value(dict, true);

	popSize = ranges.size;

	//refUID.postln;

	loadLedgerFile.value(ledgerPath);
	if(ledger == nil, {ledger = ["tmp"]});
	if(ledger.last != "tmp", {ledger = ledger.add("tmp")});

	if(refUID == nil, {lastXChanges = [initVoices.value().deepCopy]});
	if((refUID != nil) && (refUID != "tmp"), {
		var file;
		file = File((resourceDir +/+ refUID +/+ refUID ++ "_mus_model" ++ ".json").standardizePath, "r");
		lastXChanges = msgInterpret.value(file.readAllString.parseJSON["last_changes"]);
		file.close;
	});

	refUID.postln;
	lastXChanges.collect({arg item; item.postln});

	durSeeds = seedFunc.value({3.collect({rrand(100000, 999999)})}, durSeed).value.postln;
	entrancesDurFunc = genDurFunc.valueArray(entrancesProbVals[..4] ++ [entrancesProbVals[5..]] ++ [durSeeds[0]]);
	passagesDurFunc = genDurFunc.valueArray(passagesProbVals[..4] ++ [passagesProbVals[5..]] ++ [durSeeds[1]]);
	exitsDurFunc = genDurFunc.valueArray(exitsProbVals[..4] ++ [exitsProbVals[5..]] ++ [durSeeds[2]]);

	if(orders == nil, {
		orders = seedFunc.value(genOrders, orderSeed).valueArray(orderSize ++ passagesSize);
		//addr.sendMsg("/order", stringifyToDepth.value(orders, 1));
	});

	stepFunc = genStepFunc.valueArray(stepProbsVals[..1] ++ [stepProbsVals[2..]] ++ [motifSeed]);
	seq = seedFunc.value(genMotif, motifSeed).value;

	lastXChanges.collect({arg item; item.postln});

	dict = globalVarsToDict.value;
	modelString = writeResources.value(path, dict);

	//addr.sendMsg("/generated", musPath, stringifyToDepth.value(seq, 3));
	//~seq = seq;

	addr.sendMsg("/generated", path, modelString, ledgerPath);
}, \generate);
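
// usage sketch (hypothetical path; normally triggered from the GUI): generate from a model file
// and wait for the "/generated" reply carrying the model path, serialized model, and ledger path:
/*
NetAddr.localAddr.sendMsg("/generate", "/path/to/tmp/tmp_mus_model.json");
*/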

// commit the current (possibly edited) motif: give it a new UID, write its model and a copy of
// this code into the resource dir, update the ledger (add / insert / replace), and save it
OSCdef(\commit, {arg msg, time, addr, port;
	var musicData, musicChanged, dict, newLedger, modelPath, musString, musFile, test1, test2, lastCurUID, commitType, commitPos, equalityLedger;
	//msg.postln;

	/*
	test1 = msg[1].asString.parseJSON;
	test2 = (dir +/+ ".." +/+ "resources/tmp/tmp_music" ++ ".json").standardizePath.parseJSONFile;
	msgInterpret.value(test1["music"])[0][0][0][1].class.postln;
	msgInterpret.value(test2["music_data"])[0][0][0][1].class.postln;
	(test1["music"] == test2["music_data"]).postln;
	*/

	musicData = loadModelJSON.value(msg[1].asString.parseJSON)["music_data"].postln;
	musicChanged = (musicData != seq).postln;
	commitType = msg[2].asString;
	commitPos = msg[3].postln.asInteger;

	lastCurUID = curUID.deepCopy;
	curUID = genUID.value;

	File.mkdir((resourceDir +/+ curUID).standardizePath);
	File.copy(exPath, (resourceDir +/+ curUID +/+ curUID ++ "_code" ++ ".scd").standardizePath);

	modelPath = (resourceDir +/+ curUID +/+ curUID ++ "_mus_model" ++ ".json").standardizePath;
	dict = globalVarsToDict.value;
	if(musicChanged, {
		seq = musicData;
		dict["music_data"] = seq;
		dict["motif_edited"] = "true"
	});
	dict["cur_uid"] = curUID;

	writeResources.value(modelPath, dict);

	File.delete(ledgerPath ++ "_bak");
	File.copy(ledgerPath, ledgerPath ++ "_bak");
	File.delete(ledgerPath);

	/*
	if(commitType == "add", {
		if(lastCurUID == "tmp", {
			ledger = ledger.drop(-1).add(curUID);
		}, {
			ledger = ledger.add(curUID);
		})
	});
	*/

	ledger.postln;

	if(commitType == "add", {ledger = ledger.add(curUID)});
	if(commitType == "insert", {ledger = ledger.insert(commitPos + 1, curUID)});
	if(commitType == "replace", {ledger = ledger.put(commitPos, curUID)});
	equalityLedger = ledger.collect({arg item; item.asSymbol});
	if(equalityLedger.includes(\tmp).postln, {ledger.removeAt(equalityLedger.indexOf(\tmp).postln)});

	ledger.postln;

	saveLedger.value(ledger, ledgerPath);

	addr.sendMsg("/committed", curUID, ledgerPath);
	//refUID = curUID;

}, \commit);

// transport control: msg[1] == 0 stops playback; msg[1] == 1 cues the ledger from index msg[2]
// (plus the uncommitted "tmp" model, if any); any other value auditions a single model (one-shot)
OSCdef(\transport, {arg msg, time, addr, port;
	msg.postln;
	if(msg[1] == 0, {
		group.set(\release, 2);
		group.set(\gate, 0);
		player.stop;
	}, {
		// the cued sequence can now be read from file, so this can be cleaned up
		var cSize, patterns, pSeq, cuedSeek, indexStart, indexEnd, tmpLedger;
		if(msg[1] == 1, {
			pSeq = [];
			cuedSeek = (seq != nil);
			indexStart = msg[2].asInteger;
			indexEnd = ledger.size - if(cuedSeek, {2}, {1});
			//ledger.postln;
			if(((indexStart == (ledger.size - 1)) && cuedSeek).not, {
				ledger[indexStart..indexEnd].do({arg uid, index;
					var path, file;
					path = (resourceDir +/+ uid +/+ uid ++ "_mus_model" ++ ".json").standardizePath;
					file = File(path, "r");
					pSeq = pSeq.add([msgInterpret.value(file.readAllString.postln.parseJSON["music_data"]), path, indexStart + index, uid]);
					file.close;
				});
			});
			if(cuedSeek, {
				var path, file;
				path = (resourceDir +/+ "tmp/tmp_mus_model" ++ ".json").standardizePath;
				file = File(path, "r");
				pSeq = pSeq.add([msgInterpret.value(file.readAllString.parseJSON["music_data"]), path, ledger.size - 1, "tmp"]);
				file.close;
			});
			patterns = genPatterns.value(pSeq, addr);
		}, {
			pSeq = [loadModelJSON.value(msg[2].asString.parseJSON)["music_data"].postln];
			patterns = genPatterns.value(pSeq, addr, true);
		});
		player = Pfset(pattern: patterns, cleanupFunc: {
			addr.sendMsg("/transport", 0);
			addr.sendMsg("/one_shot", 0);
		});
		player = player.play
	});
}, \transport);

// transcribe a single motif to lilypond via ~transcribe, using the reference model's last chord
// (or 1/1 unisons if there is no reference)
OSCdef(\transcribe_motif, {arg msg, time, addr, port;
	var tSeq, refChord, refUID;

	msg.postln;

	tSeq = [loadModelJSON.value(msg[1].asString.parseJSON)["music_data"]];
	refUID = msg[2].asString.postln;

	if((refUID != "nil") && (refUID != "tmp"), {
		var file;
		file = File((resourceDir +/+ refUID +/+ refUID ++ "_mus_model" ++ ".json").standardizePath, "r");
		refChord = msgInterpret.value(file.readAllString.parseJSON["last_changes"]).last;
		file.close;
	}, {
		refChord = [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]];
	});

	~transcribe.value(tSeq, refChord, (dir +/+ ".." +/+ "lilypond" +/+ "includes").standardizePath, addr, "/transcribe_motif");
}, \transcribe_motif);

// transcribe a run of committed motifs to lilypond: open one part ledger file per instrument,
// transcribe each motif into its resource folder, and append the corresponding \include lines
OSCdef(\transcribe_all, {arg msg, time, addr, port;
	var cSize, patterns, cuedSeek, indexStart, indexEnd, tmpLedger;
	if(true, {
		cuedSeek = (seq != nil);
		indexStart = msg[1].asInteger;
		indexEnd = ledger.size - if(cuedSeek, {2}, {1});

		//tmp for testing transcription
		indexEnd = (indexStart+5);

		//ledger.postln;
		if(((indexStart == (ledger.size - 1)) && cuedSeek).not, {
			var lilyPartLedgerFiles;

			lilyPartLedgerFiles = 4.collect({arg p;
				File((dir +/+ ".." +/+ "lilypond" +/+ "includes" +/+ "part_" ++ ["IV", "III", "II", "I"][p] ++ ".ly").standardizePath, "w");
			});

			ledger[indexStart..indexEnd].do({arg uid, index;
				var path, file, fileString, tSeq, refUID, refChord;
				path = (resourceDir +/+ uid +/+ uid ++ "_mus_model" ++ ".json").standardizePath;
				file = File(path, "r");
				fileString = file.readAllString;
				tSeq = msgInterpret.value(fileString.parseJSON["music_data"]);
				refUID = msgInterpret.value(fileString.parseJSON["ref_uid"]);
				file.close;

				//uid.postln;
				//(refUID == "nil").postln;

				refChord = [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]];

				if(refUID != "nil", {
					path = (resourceDir +/+ refUID +/+ refUID ++ "_mus_model" ++ ".json").standardizePath;
					file = File(path, "r");
					refChord = msgInterpret.value(file.readAllString.parseJSON["last_changes"]).last;
					file.close;
				});

				if(index != indexEnd, {
					~transcribe.value(tSeq, refChord, (resourceDir +/+ uid +/+ "lilypond").standardizePath);
				}, {
					~transcribe.value(tSeq, refChord, (resourceDir +/+ uid +/+ "lilypond").standardizePath, addr, "/transcribe_all");
				});

				lilyPartLedgerFiles.do({arg f, p;
					f.write("\\include \"" ++ resourceDir +/+ uid +/+ "lilypond" +/+ "part_" ++ ["IV", "III", "II", "I"][p] ++ ".ly\"\n");
				});

			});

			lilyPartLedgerFiles.do({arg f;
				f.close
			});
		});
		/*
		if(cuedSeek, {
			var path, file;
			path = (dir +/+ ".." +/+ "resources/tmp/tmp_mus_model" ++ ".json").standardizePath;
			file = File(path, "r");
			pSeq = pSeq.add([msgInterpret.value(file.readAllString.parseJSON["music_data"]), path, ledger.size - 1, "tmp"]);
			file.close;
		});
		*/
	}, {

	});

}, \transcribe_all);
)