arts/flow/artsflow.idl


/*
Copyright (C) 2000 Stefan Westerfeld
stefan@space.twc.de
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Library General Public
License as published by the Free Software Foundation; either
version 2 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Library General Public License for more details.
You should have received a copy of the GNU Library General Public License
along with this library; see the file COPYING.LIB. If not, write to
the Free Software Foundation, Inc., 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.
*/
/*
* arts.idl - MCOP port. What's currently missing in MCOP?
*
* - namespaces (module)
*/
module Arts { // analog real time synthesizer
enum AutoSuspendState { asNoSuspend, asSuspend, asSuspendStop, asSuspendMask = 0x3,
asProducer = 0x10, asConsumer = 0x20, asDirectionMask = 0x30 };
/**
* The SynthModule interface is the base for all modules containing streams.
*
* This interface serves two purposes. On the one hand, it provides the
* functionality users of stream-carrying modules want to use (namely:
* starting and stopping the streaming).
*
* On the other hand, it provides the functionality the flow system uses
* to achieve this.
*/
interface SynthModule {
// interface for users of this module
/**
* This function starts the streaming (i.e. the module will start
* producing samples). If you write a module, do not reimplement this;
* reimplement streamInit/streamStart instead.
*/
void start();
/**
* This function stops the streaming. If you write a module, do not
* reimplement this; reimplement streamEnd instead.
*/
void stop();
// interface for people implementing modules
/**
* This is the initialization step every module goes through after all
* attributes have been set up (i.e. at this point you know which file to
* open, how to initialize your filter coefficients, and so on)
*/
void streamInit();
/**
* starts the I/O of the module
*/
void streamStart();
/**
* stops the streaming again and frees any data allocated in streamInit
*/
void streamEnd();
/**
* If you run a mixer desk (without anything connected), no calculations
* need to be done, since the output is silent anyway. For this reason
* the autoSuspend attribute exists. It allows the flow system to detect
* this idle condition, suspend the calculations, and resume them once
* something "important" happens again.
*
* There are three possible values:
*
* @li asNoSuspend - this one is appropriate when you have a module that
* is active by itself
* @li asSuspend - this one is appropriate for modules that "do nothing"
* by themselves
* @li asSuspendStop - this one is for modules that should be stopped, when
* the system gets suspended, and restarted when the
* system will start again - an example for this is
* soundcard output
*
* A module should choose asSuspend (or asSuspendStop) only if the
* following conditions are true:
*
* @li given constant inputs (like 3.0 on all ports), the module will
* give constant output after some time
* @li given only 0.0 inputs, the module will give only 0.0 outputs
* after some time
* @li the module does not synchronize itself through the signal flow
* (e.g. a midi sequence which "knows" when a second has passed by
* watching the signal flow would break when suspension happens)
* @li the module can't be made to do something by a method invocation
* (e.g. a module which starts generating noise for a second whenever
* its noise() method is called is not suspendable)
* @li the module has no internal state that changes over time when only
* constant inputs are given
*
* Typical examples for suspendable modules are arithmetic operations,
* filters, delay/hall/reverb.
*
* Typical examples for non-suspendable modules are sequences, midi stuff,
* oscillators, sample players,...
*
* To deal with modules which either read data from some external source
* (e.g. soundcard input) or write data to some external destination
* (e.g. soundcard output), the following flags are available:
*
* @li asProducer - set this flag for modules which fulfill the conditions
* for a suspendable module, but produce non-zero output
* even when left alone
* @li asConsumer - set this flag for modules which write the data to
* some external destination, and which therefore definitely
* require constant input before they can be suspended
*
* The suspension algorithm will first divide the graph of modules into
* subgraphs of interconnected modules. A subgraph is suspendable if
* all of its modules are suspendable and the subgraph does not contain
* producer(s) and consumer(s) at the same time.
*
* Finally, our module graph is suspendable if all its subgraphs are.
*/
readonly attribute AutoSuspendState autoSuspend;
};
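/*
* A minimal sketch of what a module implementation looks like on the C++
* side. This is not part of the IDL contract: it assumes the generated
* Synth_ADD_skel class (for the Synth_ADD interface declared further below),
* the StdSynthModule convenience base and the REGISTER_IMPLEMENTATION macro
* of the C++ flow/MCOP libraries, and the convention that audio ports are
* visible as float arrays inside calculateBlock().
*
*   #include "artsflow.h"
*   #include "stdsynthmodule.h"
*
*   using namespace Arts;
*
*   class Synth_ADD_impl : virtual public Synth_ADD_skel,
*                          virtual public StdSynthModule
*   {
*   public:
*       // purely arithmetic, zero in -> zero out, no internal state:
*       // safe to suspend (see the autoSuspend documentation above)
*       AutoSuspendState autoSuspend() { return asSuspend; }
*
*       // called by the flow system for every block of samples
*       void calculateBlock(unsigned long samples)
*       {
*           for(unsigned long i = 0; i < samples; i++)
*               outvalue[i] = invalue1[i] + invalue2[i];
*       }
*   };
*
*   REGISTER_IMPLEMENTATION(Synth_ADD_impl);
*/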
/**
* Plays a stream of audio data to the soundcard
*/
interface Synth_PLAY : SynthModule {
// attribute string channels;
default in audio stream invalue_left,invalue_right;
};
/**
* Records a stream of audio data from the soundcard
*/
interface Synth_RECORD : SynthModule {
// attribute string channels;
default out audio stream left,right;
};
/**
* A frequency generator
*
* This kind of object is used to create frequencies. Oscillators are
* connected to the output of this object
*/
interface Synth_FREQUENCY : SynthModule {
in audio stream frequency;
out audio stream pos;
};
/**
* A sine wave
*/
interface Synth_WAVE_SIN : SynthModule {
in audio stream pos;
out audio stream outvalue;
};
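/*
* A small usage sketch combining the three modules above to play a 440 Hz
* sine wave. It assumes the MCOP-generated C++ wrapper classes for this
* file plus the Dispatcher, connect() and setValue() helpers of the C++
* flow library; treat it as an illustration, not as a verbatim API
* reference.
*
*   #include "artsflow.h"
*   #include "connect.h"
*
*   using namespace Arts;
*
*   int main()
*   {
*       Dispatcher dispatcher;
*       Synth_FREQUENCY freq;      // provides the "pos" ramp
*       Synth_WAVE_SIN  sin;       // turns it into a sine wave
*       Synth_PLAY      play;      // sends it to the soundcard
*
*       setValue(freq, 440.0);                        // constant frequency input
*       connect(freq, sin);                           // pos -> pos
*       connect(sin, "outvalue", play, "invalue_left");
*       connect(sin, "outvalue", play, "invalue_right");
*
*       freq.start(); sin.start(); play.start();
*       dispatcher.run();
*       return 0;
*   }
*/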
/**
* A module which mixes an arbitrary number of audio streams
*/
interface Synth_MULTI_ADD : SynthModule {
in multi audio stream invalue;
out audio stream outvalue;
};
/**
* A module which adds two audio streams
*/
interface Synth_ADD : SynthModule {
default in audio stream invalue1,invalue2;
out audio stream outvalue;
};
/**
* Multiplies two audio streams
*/
interface Synth_MUL : SynthModule {
in audio stream invalue1,invalue2;
out audio stream outvalue;
default invalue1, invalue2;
};
/**
* This plays a wave file
*/
interface Synth_PLAY_WAV : SynthModule {
/**
* How fast should it be played? 1.0 = normal speed
*/
attribute float speed;
/**
* Which file should be played
*/
attribute string filename;
/**
* Is true as soon as playback of the file has finished
*/
readonly attribute boolean finished;
out audio stream left, right;
default left, right;
};
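/*
* Usage sketch (same assumptions as the sketch after Synth_WAVE_SIN:
* generated C++ wrappers, where an attribute like "filename" becomes a
* getter filename() and a setter filename(string), plus the connect()
* helper; the path is just an example):
*
*   Synth_PLAY_WAV wav;
*   Synth_PLAY play;
*
*   wav.filename("/usr/share/sounds/pop.wav");
*   wav.speed(1.0);
*   connect(wav, play);        // left/right -> invalue_left/invalue_right
*
*   wav.start(); play.start();
*   // ... run the dispatcher; wav.finished() becomes true once the whole
*   // file has been played
*/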
/**
* Sends data to a bus. Busses are dynamic N:M connections: all signals
* from all uplinks are mixed together and sent to all downlinks.
*/
interface Synth_BUS_UPLINK : SynthModule {
/**
* the name of the bus to use
*/
attribute string busname;
default in audio stream left,right;
};
/**
* Receives data from a bus. Busses are dynamic N:M connections: all signals
* from all uplinks are mixed together and sent to all downlinks.
*/
interface Synth_BUS_DOWNLINK : SynthModule {
/**
* the name of the bus to use
*/
attribute string busname;
default out audio stream left,right;
};
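/*
* Sketch: connecting two otherwise independent graphs through a bus called
* "voices" (generated C++ wrappers and connect() helper assumed; "source"
* and "output" stand for any modules with stereo outputs/inputs):
*
*   Synth_BUS_UPLINK   up;
*   Synth_BUS_DOWNLINK down;
*
*   up.busname("voices");           // everything sent to this bus ...
*   down.busname("voices");         // ... is mixed and received here
*
*   connect(source, up);            // source's left/right -> uplink
*   connect(down, output);          // downlink -> output's left/right
*
*   up.start(); down.start();
*/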
/**
* Byte stream to audio conversion object
*
* Converts an asynchronous byte stream to a synchronous audio stream
*/
interface ByteStreamToAudio : SynthModule {
attribute long samplingRate;
attribute long channels;
attribute long bits;
/**
* is the conversion currently running, or is it stalled because there
* is not enough input?
*/
readonly attribute boolean running;
async in byte stream indata;
out audio stream left,right;
default left;
default right;
};
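/*
* Configuration sketch (generated C++ wrappers assumed): set up the
* converter for 16 bit stereo data at 44100 Hz and connect its audio
* output; the raw bytes themselves arrive asynchronously on the "indata"
* stream, which is typically fed by another MCOP object rather than by
* hand.
*
*   ByteStreamToAudio conv;
*   conv.samplingRate(44100);
*   conv.channels(2);
*   conv.bits(16);
*   connect(conv, play);            // play: e.g. a Synth_PLAY
*   conv.start();
*/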
/**
* Audio to Byte stream conversion object
*
* Converts a synchronous audio stream to an asynchronous byte stream
*/
interface AudioToByteStream : SynthModule {
attribute long samplingRate;
attribute long channels;
attribute long bits;
async out byte stream outdata;
in audio stream left,right;
default left;
default right;
};
/**
* Base interface for all stereo effects
*/
interface StereoEffect : SynthModule {
default in audio stream inleft, inright;
default out audio stream outleft, outright;
};
/**
* this is a simple clipping stereo volume control
*/
interface StereoVolumeControl : StereoEffect {
attribute float scaleFactor;
readonly attribute float currentVolumeLeft;
readonly attribute float currentVolumeRight;
};
/**
* A funny FFT scope
*/
interface StereoFFTScope : StereoEffect {
readonly attribute sequence<float> scope;
};
/**
* A stack of stereo effects
*/
interface StereoEffectStack : StereoEffect {
/**
* inserts an effect at the top side (= directly after the input)
*
* @returns an ID which can be used to remove the effect again
*/
long insertTop(StereoEffect effect, string name);
/**
* inserts an effect at the bottom (= close to the output) side
*
* @returns an ID which can be used to remove the effect again
*/
long insertBottom(StereoEffect effect, string name);
/**
* removes an effect again
*/
void remove(long ID);
};
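/*
* Usage sketch (generated C++ wrappers assumed; sequence<float> getters are
* assumed to return a std::vector<float>* owned by the caller): put an FFT
* scope and a volume control into an effect chain, read the scope data,
* then remove the scope again.
*
*   StereoEffectStack   stack;
*   StereoFFTScope      scope;
*   StereoVolumeControl volume;
*
*   long scopeID  = stack.insertTop(scope, "fft scope");
*   long volumeID = stack.insertBottom(volume, "master volume");
*
*   std::vector<float> *bands = scope.scope();   // current spectrum
*   // ... display the bands somewhere ...
*   delete bands;
*
*   stack.remove(scopeID);          // the volume control stays in the chain
*/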
/*
* Audio Manager stuff
*/
enum AudioManagerDirection { amPlay, amRecord };
/**
* Information structure for audio manager clients
*/
struct AudioManagerInfo {
long ID;
string destination;
AudioManagerDirection direction;
string title, autoRestoreID;
};
/**
* an audio manager client
*/
interface AudioManagerClient {
readonly attribute long ID;
attribute AudioManagerDirection direction;
attribute string title, autoRestoreID;
void constructor(AudioManagerDirection direction, string title,
string autoRestoreID);
};
/**
* The audio manager interface
*/
interface AudioManager {
/**
* a list of destinations, where you can play/record data to/from
*/
readonly attribute sequence<string> destinations;
/**
* a list of clients
*/
readonly attribute sequence<AudioManagerInfo> clients;
/**
* this is incremented each time a change is made (e.g. a new client attached)
* TODO: SHOULD GO AWAY WITH ATTRIBUTE WATCHING
*/
readonly attribute long changes;
/**
* this is used to route a client to another destination
*/
void setDestination(long ID, string destination);
};
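/*
* Sketch (generated C++ wrappers assumed; how the AudioManager reference is
* obtained from the sound server is left out, and the client ID 42 is just
* a placeholder): move a client to the second destination in the list.
* sequence<...> getters are assumed to return std::vector pointers owned
* by the caller.
*
*   void routeClient(AudioManager manager)
*   {
*       std::vector<std::string> *dests = manager.destinations();
*       if(dests->size() > 1)
*           manager.setDestination(42, (*dests)[1]);
*       delete dests;
*   }
*/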
/**
* This is a virtual output port, which you use to play data. Where exactly
* this data gets played is managed by the audiomanager.
*
* Creation: there are two ways to initialize a Synth_AMAN_PLAY - one is
* to set title and autoRestoreID to sensible (non-empty) values. The other
* is to pass an already initialized AudioManagerClient to the constructor.
*/
interface Synth_AMAN_PLAY : SynthModule {
attribute string title, autoRestoreID;
void constructor(AudioManagerClient client);
default in audio stream left, right;
};
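/*
* Sketch of the two creation styles described above (generated C++ wrappers
* assumed; IDL "constructor" methods are assumed to map to C++ wrapper
* constructors):
*
*   // (a) let the module register itself with the audio manager
*   Synth_AMAN_PLAY out;
*   out.title("my game");
*   out.autoRestoreID("mygame_output");
*   out.start();
*
*   // (b) create the AudioManagerClient yourself and pass it in
*   AudioManagerClient client(amPlay, "my game", "mygame_output");
*   Synth_AMAN_PLAY out2(client);
*   out2.start();
*/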
/**
* This is a virtual input port, which you use to record data. Where this
* data comes from is in turn managed by the audiomanager.
*
* Creation: there are two ways to initialize a Synth_AMAN_RECORD - one is
* to set title and autoRestoreID to sensible (non-empty) values. The other
* is to pass an already initialized AudioManagerClient to the constructor.
*/
interface Synth_AMAN_RECORD : SynthModule {
attribute string title, autoRestoreID;
void constructor(AudioManagerClient client);
default out audio stream left, right;
};
/* --------------------------------------------------------------------- */
/**
* Wraps a datahandle. That is an abstraction for a float value array
* whose data can either be loaded directly from a file or pass through
* some processing stages in between (caching, reversing, cropping, ...)
* which are hidden from this interface.
* In contrast to the underlying C++ API, this datahandle is already
* open()ed after creation, so you can access its information (like
* channelCount) without further action.
* A datahandle normally has one more important function, read(), which
* is not wrapped in MCOP because of the overhead of the data
* transfer. (If there is a need for something like that in the future,
* a solution could perhaps be found.)
*/
interface DataHandle {
readonly attribute long bitDepth;
readonly attribute long channelCount;
readonly attribute long valueCount;
/**
* error code open() returned
*/
readonly attribute long errorNo;
};
/**
* Represents a datahandle which delivers the data from the underlying
* sourceDatahandle in reverse order.
*/
interface ReversedDataHandle : DataHandle {
void init(DataHandle sourceHandle);
};
/**
* Represents a datahandle which delivers an "inner" part of the data
* from the underlying sourceDatahandle. You can cut away parts at the
* start and/or the end with this.
*/
interface CroppedDataHandle : DataHandle {
void init(DataHandle sourceHandle,
long headCutValueCount,
long tailCutValueCount);
};
/**
* Represents a datahandle which delivers the data from the underlying
* sourceDatahandle without the "inner" part containing the values
* [cutOffset..cutOffset+cutValueCount-1], which will be cut away.
*/
interface CutDataHandle : DataHandle {
void init(DataHandle sourceHandle,
long cutOffset,
long cutValueCount);
};
/**
* DataHandlePlay uses a gsl_wave_osc to play back data from a
* DataHandle using sophisticated anti-aliasing filtering and caching
* techniques. (Though not implemented at the time of writing this, it
* will be optimized for cases where the anti-aliasing is not needed
* because the mixerFrequency equals that of the current soundserver.)
*/
interface DataHandlePlay : SynthModule {
/**
* Which data should be played?
*/
attribute DataHandle handle;
/**
* What is the normal mixer frequency the data from the handle
* should be played back at? (default: current mixing frequency
* of the soundserver, e.g. 44100)
*/
attribute float mixerFrequency;
/**
* Which channel of the datahandle should be played?
* (defaults to 0 = the first channel)
*/
attribute long channelIndex;
/**
* How fast should the data be played?
* (defaults to 1.0 = normal speed, see mixerFrequency)
*/
attribute float speed;
/**
* Current position while playing; in fact it is the index into the
* datahandle, so 0 <= pos < handle.valueCount
*/
attribute long pos;
/**
* Is true as soon as playback has finished
*/
readonly attribute boolean finished;
/**
* Can be used to pause and/or continue playing
*/
attribute boolean paused;
default out audio stream outvalue;
DataHandlePlay clone();
};
/**
* DataHandle which represents sample data loaded from a file. Note
* that the samples from all channels are interleaved, that is, the
* samples of the first channel in a stereo file are found at offsets
* 0,2,4,6,.. etc.
*/
interface WaveDataHandle : DataHandle {
/**
* Properties of the loaded sample data. Note that those
* properties are only available from a WaveDataHandle, but may be
* available from a DataHandle in the future.
*/
readonly attribute float mixerFrequency;
readonly attribute float oscillatorFrequency;
/**
* Load the first wavechunk from a file and return true on
* success. A more specific error code is not available at the
* moment.
*/
boolean load(string filename);
/**
* Load a specific wavechunk from a file and return true on
* success. A more specific error code is not available at the
* moment.
*/
boolean load(string filename,
long waveIndex, long chunkIndex);
/**
* Return true if and only if a wavechunk was successfully loaded
* from a file.
*/
readonly attribute boolean isLoaded;
/**
* Creates a DataHandlePlay object with the important attributes
* handle, mixerFrequency and channelCount already set to play
* this WaveDataHandle.
*/
DataHandlePlay createPlayer();
};
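/*
* Putting the datahandle interfaces together (generated C++ wrappers
* assumed; the file name is just an example): load a wave file, cut off
* roughly the first second (remember the values are interleaved, so one
* second is mixerFrequency * channelCount values), and set up a player
* for it. The player's mono "outvalue" stream can then be connected like
* any other audio port.
*
*   void playCroppedWave()
*   {
*       WaveDataHandle wav;
*       if(!wav.load("/tmp/loop.wav"))
*           return;                           // load failed
*
*       CroppedDataHandle cropped;
*       cropped.init(wav, (long)(wav.mixerFrequency() * wav.channelCount()), 0);
*
*       DataHandlePlay player;
*       player.handle(cropped);
*       player.mixerFrequency(wav.mixerFrequency());
*       player.channelIndex(0);               // play the first channel
*       player.start();
*   }
*/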
};